Diffstat (limited to 'potpourri')
-rw-r--r--  potpourri/analyze-rcynic-history.py    290
-rw-r--r--  potpourri/analyze-transition.py    88
-rw-r--r--  potpourri/apnic-to-csv.py    55
-rwxr-xr-x  potpourri/application-x-rpki-mailcap-handler.sh    53
-rw-r--r--  potpourri/arin-to-csv.py    114
-rw-r--r--  potpourri/cross_certify.py    74
-rw-r--r--  potpourri/csvgrep.py    72
-rw-r--r--  potpourri/expand-roa-prefixes.py    79
-rw-r--r--  potpourri/extract-key.py    64
-rw-r--r--  potpourri/fakerootd.py    50
-rw-r--r--  potpourri/find-roa-expiration.py    61
-rw-r--r--  potpourri/format-application-x-rpki.py    132
-rw-r--r--  potpourri/gc_summary.awk    72
-rw-r--r--  potpourri/gc_summary.py    112
-rw-r--r--  potpourri/generate-ripe-root-cert.py    57
-rw-r--r--  potpourri/gski.py    21
-rw-r--r--  potpourri/guess-roas-from-routeviews.py    63
-rw-r--r--  potpourri/iana-to-csv.py    85
-rw-r--r--  potpourri/missing-oids.py    38
-rw-r--r--  potpourri/object-dates.py    63
-rw-r--r--  potpourri/pcap-to-xml.sh    36
-rw-r--r--  potpourri/print-profile.py    20
-rw-r--r--  potpourri/rcynic-diff.py    114
-rwxr-xr-x  potpourri/rcynic-lta    1055
-rw-r--r--  potpourri/rcynic-lta.yaml    69
-rwxr-xr-x  potpourri/repo0-testbed-daily    19
-rwxr-xr-x  potpourri/repo0-testbed-monthly    22
-rwxr-xr-x  potpourri/repo0-testbed-weekly    96
-rw-r--r--  potpourri/ripe-asns-to-csv.py    108
-rw-r--r--  potpourri/ripe-prefixes-to-csv.awk    37
-rw-r--r--  potpourri/ripe-to-csv.awk    124
-rw-r--r--  potpourri/ripe-to-csv.py    138
-rw-r--r--  potpourri/roa-to-irr.py    159
-rwxr-xr-x  potpourri/rp-sqlite    425
-rw-r--r--  potpourri/rp-sqlite.yaml    53
l---------  potpourri/rpki    1
-rwxr-xr-x  potpourri/rpkidemo    495
-rw-r--r--  potpourri/rpkidemo.pem    23
-rw-r--r--  potpourri/rpkigui-flatten-roas.py    37
-rw-r--r--  potpourri/rpkigui-reset-demo.py    48
-rw-r--r--  potpourri/rpkigui-sync-users.py    32
-rw-r--r--  potpourri/rrd-rcynic-history.py    201
-rw-r--r--  potpourri/setup-rootd.sh    36
-rw-r--r--  potpourri/show-cms-ee-certs.sh    25
-rw-r--r--  potpourri/show-tracking-data.py    39
-rw-r--r--  potpourri/signed-object-dates.py    40
-rw-r--r--  potpourri/splitbase64.xsl    66
-rw-r--r--  potpourri/testbed-rootcert.py    66
-rw-r--r--  potpourri/translate-handles.py    49
-rw-r--r--  potpourri/upgrade-add-ghostbusters.py    73
-rwxr-xr-x  potpourri/verify-bpki.sh    43
-rw-r--r--  potpourri/whack-ripe-asns.py    83
-rw-r--r--  potpourri/whack-ripe-prefixes.py    101
-rw-r--r--  potpourri/x509-dot.py    170
54 files changed, 5746 insertions, 0 deletions
diff --git a/potpourri/analyze-rcynic-history.py b/potpourri/analyze-rcynic-history.py
new file mode 100644
index 00000000..b72d0741
--- /dev/null
+++ b/potpourri/analyze-rcynic-history.py
@@ -0,0 +1,290 @@
+# $Id$
+#
+# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Parse traffic data out of rcynic XML output, whack it a bit, print some
+summaries and run gnuplot to draw some pictures.
+"""
+
+plot_all_hosts = False
+
+window_hours = 72
+
+import mailbox
+import sys
+import urlparse
+import os
+import datetime
+import subprocess
+import shelve
+
+from xml.etree.cElementTree import (ElementTree as ElementTree,
+ fromstring as ElementTreeFromString)
+
+def parse_utc(s):
+ return datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ")
+
+class Rsync_History(object):
+ """
+ An Rsync_History object represents one rsync connection.
+ """
+
+ def __init__(self, elt):
+ self.error = elt.get("error")
+ self.uri = elt.text.strip()
+ self.hostname = urlparse.urlparse(self.uri).hostname or None
+ self.elapsed = parse_utc(elt.get("finished")) - parse_utc(elt.get("started"))
+
+class Host(object):
+ """
+ A host object represents all the data collected for one host. Note
+ that it (usually) contains a list of all the sessions in which this
+ host appears.
+ """
+
+ def __init__(self, hostname, session_id):
+ self.hostname = hostname
+ self.session_id = session_id
+ self.elapsed = datetime.timedelta(0)
+ self.connection_count = 0
+ self.dead_connections = 0
+ self.uris = set()
+ self.total_connection_time = datetime.timedelta(0)
+
+ def add_rsync_history(self, h):
+ self.connection_count += 1
+ self.elapsed += h.elapsed
+ self.dead_connections += int(h.error is not None)
+ self.total_connection_time += h.elapsed
+
+ def add_uri(self, u):
+ self.uris.add(u)
+
+ def finalize(self):
+ self.object_count = len(self.uris)
+ del self.uris
+
+ @property
+ def failed(self):
+ return 1 if self.dead_connections else 0
+
+ @property
+ def seconds_per_object(self):
+ if self.failed:
+ return None
+ else:
+ return float(self.elapsed.days * 24 * 60 * 60 +
+ self.elapsed.seconds +
+ self.elapsed.microseconds / 10**6) / float(self.object_count)
+
+ @property
+ def objects_per_connection(self):
+ if self.failed:
+ return None
+ else:
+ return float(self.object_count) / float(self.connection_count)
+
+ @property
+ def average_connection_time(self):
+ return float(self.total_connection_time.days * 24 * 60 * 60 +
+ self.total_connection_time.seconds +
+ self.total_connection_time.microseconds / 10**6) / float(self.connection_count)
+
+ class Format(object):
+
+ def __init__(self, attr, title, fmt, ylabel = ""):
+ self.attr = attr
+ self.title = title
+ self.width = len(title) - int("%" in fmt)
+ self.fmt = "%%%d%s" % (self.width, fmt)
+ self.oops = "*" * self.width
+ self.ylabel = ylabel
+
+ def __call__(self, obj):
+ try:
+ value = getattr(obj, self.attr)
+ return None if value is None else self.fmt % value
+ except ZeroDivisionError:
+ return self.oops
+
+ format = (Format("connection_count", "Connections", "d", "Connections To Repository (Per Session)"),
+ Format("object_count", "Objects", "d", "Objects In Repository (Distinct URIs Per Session)"),
+ Format("objects_per_connection", "Objects/Connection", ".3f", "Objects In Repository / Connections To Repository"),
+ Format("seconds_per_object", "Seconds/Object", ".3f", "Seconds To Transfer / Object (Average Per Session)"),
+ Format("failure_rate_running", "Failure Rate", ".3f%%", "Sessions With Failed Connections Within Last %d Hours" % window_hours),
+ Format("average_connection_time", "Average Connection", ".3f", "Seconds / Connection (Average Per Session)"),
+ Format("hostname", "Hostname", "s"))
+
+ format_dict = dict((fmt.attr, fmt) for fmt in format)
+
+ def format_field(self, name):
+ result = self.format_dict[name](self)
+ return None if result is None else result.strip()
+
+class Session(dict):
+ """
+ A session corresponds to one XML file. This is a dictionary of Host
+ objects, keyed by hostname.
+ """
+
+ def __init__(self, session_id, msg_key):
+ self.session_id = session_id
+ self.msg_key = msg_key
+ self.date = parse_utc(session_id)
+ self.calculated_failure_history = False
+
+ @property
+ def hostnames(self):
+ return set(self.iterkeys())
+
+ def get_plot_row(self, name, hostnames):
+ return (self.session_id,) + tuple(self[h].format_field(name) if h in self else "" for h in hostnames)
+
+ def add_rsync_history(self, h):
+ if h.hostname not in self:
+ self[h.hostname] = Host(h.hostname, self.session_id)
+ self[h.hostname].add_rsync_history(h)
+
+ def add_uri(self, u):
+ h = urlparse.urlparse(u).hostname
+ if h and h in self:
+ self[h].add_uri(u)
+
+ def finalize(self):
+ for h in self.itervalues():
+ h.finalize()
+
+ def calculate_failure_history(self, sessions):
+ start = self.date - datetime.timedelta(hours = window_hours)
+ sessions = tuple(s for s in sessions if s.date <= self.date and s.date > start)
+ for hostname, h in self.iteritems():
+ i = n = 0
+ for s in sessions:
+ if hostname in s:
+ i += s[hostname].failed
+ n += 1
+ h.failure_rate_running = float(100 * i) / n
+ self.calculated_failure_history = True
+
+def plotter(f, hostnames, field, logscale = False):
+ plotlines = sorted(session.get_plot_row(field, hostnames) for session in sessions)
+ title = Host.format_dict[field].title
+ ylabel = Host.format_dict[field].ylabel
+ n = len(hostnames) + 1
+ assert all(n == len(plotline) for plotline in plotlines)
+ if "%%" in Host.format_dict[field].fmt:
+ f.write('set format y "%.0f%%"\n')
+ else:
+ f.write('set format y\n')
+ if logscale:
+ f.write("set logscale y\n")
+ else:
+ f.write("unset logscale y\n")
+ f.write("""
+ set xdata time
+ set timefmt '%Y-%m-%dT%H:%M:%SZ'
+ #set format x '%m/%d'
+ #set format x '%b%d'
+ #set format x '%Y-%m-%d'
+ set format x '%Y-%m'
+ #set title '""" + title + """'
+ set ylabel '""" + ylabel + """'
+ plot""" + ",".join(" '-' using 1:2 with linespoints pointinterval 500 title '%s'" % h for h in hostnames) + "\n")
+ for i in xrange(1, n):
+ for plotline in plotlines:
+ if plotline[i] is not None:
+ f.write("%s %s\n" % (plotline[0], plotline[i].rstrip("%")))
+ f.write("e\n")
+
+def plot_hosts(hostnames, fields):
+ for field in fields:
+ for logscale in (False, True):
+ gnuplot = subprocess.Popen(("gnuplot",), stdin = subprocess.PIPE)
+ gnuplot.stdin.write("set terminal pdf\n")
+ gnuplot.stdin.write("set output '%s/%s-%s.pdf'\n" % (outdir, field, "log" if logscale else "linear"))
+ plotter(gnuplot.stdin, hostnames, field, logscale = logscale)
+ gnuplot.stdin.close()
+ gnuplot.wait()
+
+outdir = "images"
+
+if not os.path.exists(outdir):
+ os.makedirs(outdir)
+
+mb = mailbox.Maildir("/u/sra/rpki/rcynic-xml", factory = None, create = False)
+
+if sys.platform == "darwin": # Sigh
+ shelf = shelve.open("rcynic-xml", "c")
+else:
+ shelf = shelve.open("rcynic-xml.db", "c")
+
+sessions = []
+
+latest = None
+parsed = 0
+
+for i, key in enumerate(mb.iterkeys(), 1):
+ sys.stderr.write("\r%s %d/%d/%d..." % ("|\\-/"[i & 3], parsed, i, len(mb)))
+
+ if key in shelf:
+ session = shelf[key]
+
+ else:
+ assert not mb[key].is_multipart()
+ input = ElementTreeFromString(mb[key].get_payload())
+ date = input.get("date")
+ sys.stderr.write("%s..." % date)
+ session = Session(date, key)
+ for elt in input.findall("rsync_history"):
+ session.add_rsync_history(Rsync_History(elt))
+ for elt in input.findall("validation_status"):
+ if elt.get("generation") == "current":
+ session.add_uri(elt.text.strip())
+ session.finalize()
+ shelf[key] = session
+ parsed += 1
+
+ sessions.append(session)
+ if latest is None or session.session_id > latest.session_id:
+ latest = session
+
+sys.stderr.write("\n")
+
+shelf.sync()
+
+for session in sessions:
+ if not getattr(session, "calculated_failure_history", False):
+ session.calculate_failure_history(sessions)
+ shelf[session.msg_key] = session
+
+if plot_all_hosts:
+ hostnames = sorted(reduce(lambda x, y: x | y,
+ (s.hostnames for s in sessions),
+ set()))
+
+else:
+ hostnames = ("rpki.apnic.net", "rpki.ripe.net", "localcert.ripe.net",
+ "repository.lacnic.net", "rpki.afrinic.net", "rpki.arin.net",
+ "arin.rpki.net", "repo0.rpki.net", "rgnet.rpki.net")
+
+plot_hosts(hostnames, [fmt.attr for fmt in Host.format if fmt.attr != "hostname"])
+
+if latest is not None:
+ f = open("rcynic.xml", "wb")
+ f.write(mb[latest.msg_key].get_payload())
+ f.close()
+
+shelf.close()
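
The script above only depends on a few pieces of the rcynic XML: the root element's date attribute, rsync_history elements (started/finished timestamps, an optional error attribute, and the URI as element text), and validation_status elements with a generation attribute. A minimal sketch of that parsing against a made-up fragment; the root element name here is a guess, since the script never inspects it:

    # Minimal sketch of the parsing done above.  The XML fragment is illustrative only.
    import datetime
    from xml.etree.ElementTree import fromstring

    sample = """
    <rcynic-summary date="2012-01-01T00:00:00Z">
      <rsync_history started="2012-01-01T00:00:01Z" finished="2012-01-01T00:00:05Z">rsync://rpki.example.net/repository/</rsync_history>
      <validation_status generation="current">rsync://rpki.example.net/repository/example.cer</validation_status>
    </rcynic-summary>
    """

    def parse_utc(s):
        return datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ")

    root = fromstring(sample)
    print("session date: %s" % root.get("date"))
    for elt in root.findall("rsync_history"):
        elapsed = parse_utc(elt.get("finished")) - parse_utc(elt.get("started"))
        print("%s fetched in %s" % (elt.text.strip(), elapsed))
    for elt in root.findall("validation_status"):
        if elt.get("generation") == "current":
            print("current object: %s" % elt.text.strip())
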
diff --git a/potpourri/analyze-transition.py b/potpourri/analyze-transition.py
new file mode 100644
index 00000000..e2125dfb
--- /dev/null
+++ b/potpourri/analyze-transition.py
@@ -0,0 +1,88 @@
+# $Id$
+#
+# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC")
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Compare rcynic.xml files, tell the user what became invalid, and why.
+"""
+
+import sys
+
+try:
+ from lxml.etree import ElementTree
+except ImportError:
+ from xml.etree.ElementTree import ElementTree
+
+class Object(object):
+
+ def __init__(self, session, uri):
+ self.session = session
+ self.uri = uri
+ self.labels = []
+
+ def add(self, label):
+ self.labels.append(label)
+
+ def __cmp__(self, other):
+ return cmp(self.labels, other.labels)
+
+ @property
+ def accepted(self):
+ return "object_accepted" in self.labels
+
+class Session(dict):
+
+ def __init__(self, name):
+ self.name = name
+ tree = ElementTree(file = name)
+ labels = tuple((elt.tag.strip(), elt.text.strip()) for elt in tree.find("labels"))
+ self.labels = tuple(pair[0] for pair in labels)
+ self.descrs = dict(labels)
+ self.date = tree.getroot().get("date")
+ for elt in tree.findall("validation_status"):
+ status = elt.get("status")
+ uri = elt.text.strip()
+ if status.startswith("rsync_transfer_") or elt.get("generation") != "current":
+ continue
+ if uri not in self:
+ self[uri] = Object(self, uri)
+ self[uri].add(status)
+
+skip_labels = frozenset(("object_accepted", "object_rejected"))
+
+old_db = new_db = None
+
+for arg in sys.argv[1:]:
+
+ old_db = new_db
+ new_db = Session(arg)
+ if old_db is None:
+ continue
+
+ old_uris = frozenset(old_db)
+ new_uris = frozenset(new_db)
+
+ for uri in sorted(old_uris - new_uris):
+ print new_db.date, uri, "dropped"
+
+ for uri in sorted(old_uris & new_uris):
+ old = old_db[uri]
+ new = new_db[uri]
+ if old.accepted and not new.accepted:
+ print new_db.date, uri, "invalid"
+ labels = frozenset(new.labels) - frozenset(old.labels) - skip_labels
+ for label in new.labels:
+ if label in labels:
+ print " ", new_db.descrs[label]
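
Each rcynic.xml named on the command line is compared against the file named immediately before it, so the arguments should normally be successive snapshots in chronological order. A hypothetical invocation (file names are illustrative):

    python analyze-transition.py rcynic.2012-06-01.xml rcynic.2012-06-02.xml rcynic.2012-06-03.xml

For each consecutive pair, URIs present in the earlier file but missing from the later one are reported as "dropped", and URIs that went from accepted to not accepted are reported as "invalid", followed by the labels that newly apply.
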
diff --git a/potpourri/apnic-to-csv.py b/potpourri/apnic-to-csv.py
new file mode 100644
index 00000000..62293a51
--- /dev/null
+++ b/potpourri/apnic-to-csv.py
@@ -0,0 +1,55 @@
+# $Id$
+#
+# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Parse APNIC "Extended Allocation and Assignment" reports and write
+out (just) the RPKI-relevant fields in myrpki-format CSV syntax.
+"""
+
+from rpki.csv_utils import csv_writer
+from rpki.ipaddrs import v4addr
+
+asns = csv_writer("asns.csv")
+prefixes = csv_writer("prefixes.csv")
+
+for line in open("delegated-apnic-extended-latest"):
+
+ line = line.rstrip()
+
+ if not line.startswith("apnic|") or line.endswith("|summary"):
+ continue
+
+ try:
+ registry, cc, rectype, start, value, date, status, opaque_id = line.split("|")
+ except ValueError:
+ continue
+
+ if not opaque_id:
+ continue
+
+ assert registry == "apnic"
+
+ if rectype == "asn":
+ asns.writerow((opaque_id, "%s-%s" % (start, int(start) + int(value) - 1)))
+
+ elif rectype == "ipv4":
+ prefixes.writerow((opaque_id, "%s-%s" % (start, v4addr(v4addr(start) + long(value) - 1))))
+
+ elif rectype == "ipv6":
+ prefixes.writerow((opaque_id, "%s/%s" % (start, value)))
+
+asns.close()
+prefixes.close()
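
As a concrete illustration of the ipv4 branch above: a delegated-extended record gives a starting address and an address count, which the script turns into an inclusive start-end range keyed by the opaque ID. The same arithmetic with the stock ipaddress module (standard in Python 3, a backport for Python 2); the input line is invented, not real APNIC data:

    # Sketch of the ipv4 case above using the ipaddress module; the record is made up.
    import ipaddress

    line = "apnic|AU|ipv4|192.0.2.0|256|20110101|allocated|A91A7381"
    registry, cc, rectype, start, value, date, status, opaque_id = line.split("|")
    end = ipaddress.IPv4Address(start) + int(value) - 1
    print("%s,%s-%s" % (opaque_id, start, end))   # -> A91A7381,192.0.2.0-192.0.2.255
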
diff --git a/potpourri/application-x-rpki-mailcap-handler.sh b/potpourri/application-x-rpki-mailcap-handler.sh
new file mode 100755
index 00000000..891987d9
--- /dev/null
+++ b/potpourri/application-x-rpki-mailcap-handler.sh
@@ -0,0 +1,53 @@
+#!/bin/sh -
+# $Id$
+#
+# Copyright (C) 2010 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+# Given the Maildir dump format, one can use Mutt as a viewer with two
+# tweaks:
+#
+# 1) Add to ~/.muttrc
+#
+# auto_view application/x-rpki
+#
+# 2) Add to ~/.mailcap
+#
+# application/x-rpki; /path/to/this/script.sh ; copiousoutput
+#
+# "copiousoutput" is required by mutt to enable auto_view (inline
+# display) behavior.
+#
+# This script could do fancier things (pretty XML formatting,
+# verification checks of the CMS, etcetera) if anybody cared.
+# For the moment the main use for this script is debugging.
+
+# We have to jump through some hoops to figure out where our OpenSSL
+# binary is. If you have already installed an OpenSSL binary that
+# understands CMS, feel free to use that instead.
+
+#exec 2>&1; set -x
+
+: ${AWK=/usr/bin/awk}
+: ${OPENSSL=$(/usr/bin/dirname $0)/../openssl/openssl/apps/openssl}
+: ${SPLITBASE64=$(/usr/bin/dirname $0)/splitbase64.xsl}
+: ${XMLINDENT=/usr/local/bin/xmlindent}
+: ${XMLLINT=/usr/local/bin/xmllint}
+: ${XSLTPROC=/usr/local/bin/xsltproc}
+
+# This produces prettier output, but also hangs sometimes, apparently some xmlindent bug dealing with really long XML attributes
+#OPENSSL_CONF=/dev/null $OPENSSL cms -verify -nosigs -noverify -inform DER 2>/dev/null | $XSLTPROC $SPLITBASE64 - | $XMLINDENT -i 2 | $AWK NF
+
+# So we do this instead
+OPENSSL_CONF=/dev/null $OPENSSL cms -verify -nosigs -noverify -inform DER 2>/dev/null | $XSLTPROC $SPLITBASE64 - | $XMLLINT -format -
diff --git a/potpourri/arin-to-csv.py b/potpourri/arin-to-csv.py
new file mode 100644
index 00000000..a4e7ffc3
--- /dev/null
+++ b/potpourri/arin-to-csv.py
@@ -0,0 +1,114 @@
+# $Id$
+#
+# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Parse an ARIN database research dump and write out (just) the
+RPKI-relevant fields in myrpki-format CSV syntax.
+
+NB: The input data for this script comes from ARIN under an agreement
+that allows research use but forbids redistribution, so if you think
+you need a copy of the data, please talk to ARIN about it, not us.
+
+Input format used to be RPSL WHOIS dump, but ARIN recently went Java,
+so we have to parse a 3.5GB XML "document". Credit to Liza Daly for
+explaining the incantations needed to convince lxml to do this nicely,
+see: http://www.ibm.com/developerworks/xml/library/x-hiperfparse/
+"""
+
+import sys
+import lxml.etree
+
+from rpki.csv_utils import csv_writer
+
+def ns(tag):
+ return "{http://www.arin.net/bulkwhois/core/v1}" + tag
+
+tag_asn = ns("asn")
+tag_net = ns("net")
+tag_org = ns("org")
+tag_poc = ns("poc")
+tag_orgHandle = ns("orgHandle")
+tag_netBlock = ns("netBlock")
+tag_type = ns("type")
+tag_startAddress = ns("startAddress")
+tag_endAddress = ns("endAddress")
+tag_startAsNumber = ns("startAsNumber")
+tag_endAsNumber = ns("endAsNumber")
+
+def find(node, tag):
+ return node.findtext(tag).strip()
+
+def do_asn(node):
+ asns.writerow((find(node, tag_orgHandle),
+ "%s-%s" % (find(node, tag_startAsNumber),
+ find(node, tag_endAsNumber))))
+
+erx_table = {
+ "AF" : "afrinic",
+ "AP" : "apnic",
+ "AR" : "arin",
+ "AV" : "arin",
+ "FX" : "afrinic",
+ "LN" : "lacnic",
+ "LX" : "lacnic",
+ "PV" : "apnic",
+ "PX" : "apnic",
+ "RN" : "ripe",
+ "RV" : "ripe",
+ "RX" : "ripe" }
+
+def do_net(node):
+ handle = find(node, tag_orgHandle)
+ for netblock in node.iter(tag_netBlock):
+ tag = find(netblock, tag_type)
+ startAddress = find(netblock, tag_startAddress)
+ endAddress = find(netblock, tag_endAddress)
+ if not startAddress.endswith(".000") and not startAddress.endswith(":0000"):
+ continue
+ if not endAddress.endswith(".255") and not endAddress.endswith(":FFFF"):
+ continue
+ if tag in ("DS", "DA", "IU"):
+ prefixes.writerow((handle, "%s-%s" % (startAddress, endAddress)))
+ elif tag in erx_table:
+ erx.writerow((erx_table[tag], "%s-%s" % (startAddress, endAddress)))
+
+dispatch = { tag_asn : do_asn, tag_net : do_net }
+
+asns = csv_writer("asns.csv")
+prefixes = csv_writer("prefixes.csv")
+erx = csv_writer("erx.csv")
+
+root = None
+
+for event, node in lxml.etree.iterparse(sys.stdin):
+
+ if root is None:
+ root = node
+ while root.getparent() is not None:
+ root = root.getparent()
+
+ if node.getparent() is root:
+
+ if node.tag in dispatch:
+ dispatch[node.tag](node)
+
+ node.clear()
+ while node.getprevious() is not None:
+ del node.getparent()[0]
+
+asns.close()
+prefixes.close()
+erx.close()
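
The iterparse loop above is the memory-bounding incantation the docstring refers to: handle each second-level element once it is complete, then clear it and delete siblings that have already been processed, so the parsed tree never grows to the size of the 3.5GB input. The same pattern in isolation, with comments (real per-element work elided):

    # Generic sketch of the streaming lxml.etree.iterparse pattern used above.
    import sys
    import lxml.etree

    root = None
    for event, node in lxml.etree.iterparse(sys.stdin):    # default is "end" events only
        if root is None:                                   # remember the document root once
            root = node
            while root.getparent() is not None:
                root = root.getparent()
        if node.getparent() is root:                       # a complete second-level element
            # ... handle the fully parsed element here ...
            node.clear()                                   # free this element's subtree
            while node.getprevious() is not None:          # drop siblings already handled
                del node.getparent()[0]
    # Under Python 3, read the byte stream from sys.stdin.buffer instead of sys.stdin.
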
diff --git a/potpourri/cross_certify.py b/potpourri/cross_certify.py
new file mode 100644
index 00000000..fab7743b
--- /dev/null
+++ b/potpourri/cross_certify.py
@@ -0,0 +1,74 @@
+# $Id$
+#
+# Copyright (C) 2014 Dragon Research Labs ("DRL")
+# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC")
+# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notices and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL
+# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL,
+# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
+# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Cross-certification tool to issue a new certificate based on an old
+one that was issued by somebody else. The point of the exercise is to
+end up with a valid certificate in our own BPKI which has the same
+subject name and subject public key as the one we're replacing.
+"""
+
+import os
+import sys
+import time
+import argparse
+import rpki.x509
+import rpki.sundial
+
+os.environ["TZ"] = "UTC"
+time.tzset()
+
+parser = argparse.ArgumentParser(description = __doc__)
+parser.add_argument("-i", "--in", required = True, dest = "input",
+ type = lambda s: rpki.x509.X509(Auto_file = s),
+ help = "input certificate")
+parser.add_argument("-c", "--ca", required = True,
+ type = lambda s: rpki.x509.X509(Auto_file = s),
+ help = "issuing certificate")
+parser.add_argument("-k", "--key", required = True,
+ type = lambda s: rpki.x509.RSA(Auto_file = s),
+ help = "private key of issuing certificate")
+parser.add_argument("-s", "--serial", required = True,
+ help = "serial number file")
+parser.add_argument("-o", "--out",
+ help = "output filename")
+parser.add_argument("-l", "--lifetime",
+ type = rpki.sundial.timedelta, default = "30d",
+ help = "lifetime of generated certificate")
+args = parser.parse_args()
+
+now = rpki.sundial.now()
+notAfter = now + args.lifetime
+
+try:
+ with open(args.serial, "r") as f:
+ serial = int(f.read().splitlines()[0], 16)
+except IOError:
+ serial = 1
+
+cert = args.ca.cross_certify(args.key, args.input, serial, notAfter, now)
+
+with open(args.serial, "w") as f:
+ f.write("%02x\n" % (serial + 1))
+
+if args.out is None:
+ sys.stdout.write(cert.get_PEM())
+else:
+ with open(args.out, "w") as f:
+ f.write(cert.get_PEM())
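
A hypothetical invocation using the options defined above (file names are placeholders):

    python cross_certify.py --in their-ca.cer --ca my-bpki-ta.cer --key my-bpki-ta.key \
        --serial cross-cert.serial --lifetime 30d --out their-ca-cross.cer

If the serial file does not exist yet, numbering starts at 1 and the file is created on the way out; without --out, the new certificate's PEM is written to stdout.
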
diff --git a/potpourri/csvgrep.py b/potpourri/csvgrep.py
new file mode 100644
index 00000000..68bdd259
--- /dev/null
+++ b/potpourri/csvgrep.py
@@ -0,0 +1,72 @@
+# $Id$
+#
+# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Utility to simplify finding handles in one of the pseudo-RIR databases.
+
+Usage: python csvgrep.py datum [datum ...]
+
+where each datum is an ASN, IP address, or IP prefix.
+
+ASNs are recognized by being pure integers; IP addresses are recognized
+by having dots (IPv4) or colons (IPv6).
+
+After eating all of the command line arguments, we search asns.csv for
+any ASNs given, and prefixes.csv for any prefixes given.
+"""
+
+import sys
+from rpki.resource_set import resource_set_as, resource_set_ipv4, resource_set_ipv6
+from rpki.csv_utils import csv_reader
+
+asn = resource_set_as()
+ipv4 = resource_set_ipv4()
+ipv6 = resource_set_ipv6()
+
+for datum in sys.argv[1:]:
+ if datum.replace("-", "").isdigit():
+ t = asn
+ else:
+ t = ipv6 if ":" in datum else ipv4
+ if "-" not in datum and "/" not in datum:
+ datum = datum + "-" + datum
+ try:
+ t.append(t.parse_str(datum))
+ except:
+ print "Error attempting to parse", datum
+ raise
+
+#print "Looking for: ASNs %s IPv4 %s IPv6 %s" % (asn, ipv4, ipv6)
+
+def matches(set1, datum):
+ set2 = set1.__class__(datum)
+ if set1.intersection(set2):
+ return set2
+ else:
+ return False
+
+if asn:
+ for h, a in csv_reader("asns.csv", columns = 2):
+ m = matches(asn, a)
+ if m:
+ print h, m
+
+if ipv4 or ipv6:
+ for h, a in csv_reader("prefixes.csv", columns = 2):
+ t = ipv6 if ":" in a else ipv4
+ m = t and matches(t, a)
+ if m:
+ print h, m
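
A hypothetical run against the asns.csv and prefixes.csv produced by the other scripts here, using documentation values rather than real resources:

    python csvgrep.py 64496 192.0.2.0/24 2001:db8::/32

64496 is treated as an ASN because it is purely numeric; the other two arguments land in the IPv4 and IPv6 prefix sets respectively, and any handle whose registered resources intersect them is printed.
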
diff --git a/potpourri/expand-roa-prefixes.py b/potpourri/expand-roa-prefixes.py
new file mode 100644
index 00000000..ae34ea0a
--- /dev/null
+++ b/potpourri/expand-roa-prefixes.py
@@ -0,0 +1,79 @@
+# $Id$
+#
+# Copyright (C) 2011 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+I got tired of trying to explain in English how the maxLength macro
+hack works in ROAs, so this is an attempt to explain it as code.
+
+Given one or more ROA prefix sets on the command line, this script
+prints out the expansion as a list of prefixes.
+"""
+
+import sys
+import rpki.resource_set
+import rpki.ipaddrs
+
+class NotAPrefix(Exception):
+ """
+ Address is not a proper prefix.
+ """
+
+class address_range(object):
+ """
+ Iterator for rpki.ipaddrs address objects.
+ """
+
+ def __init__(self, start, stop, step):
+ self.addr = start
+ self.stop = stop
+ self.step = step
+ self.type = type(start)
+
+ def __iter__(self):
+ while self.addr < self.stop:
+ yield self.addr
+ self.addr = self.type(self.addr + self.step)
+
+def main(argv):
+
+ prefix_sets = []
+ for arg in argv:
+ if ":" in arg:
+ prefix_sets.extend(rpki.resource_set.roa_prefix_set_ipv6(arg))
+ else:
+ prefix_sets.extend(rpki.resource_set.roa_prefix_set_ipv4(arg))
+
+ for prefix_set in prefix_sets:
+ sys.stdout.write("%s expands to:\n" % prefix_set)
+
+ prefix_type = prefix_set.range_type.datum_type
+ prefix_min = prefix_set.prefix
+ prefix_max = prefix_set.prefix + (1L << (prefix_type.bits - prefix_set.prefixlen))
+
+ for prefixlen in xrange(prefix_set.prefixlen, prefix_set.max_prefixlen + 1):
+
+ step = (1L << (prefix_type.bits - prefixlen))
+ mask = step - 1
+
+ for addr in address_range(prefix_min, prefix_max, step):
+ if (addr & mask) != 0:
+ raise NotAPrefix, "%s is not a /%d prefix" % (addr, prefixlen)
+ sys.stdout.write(" %s/%d\n" % (addr, prefixlen))
+
+ sys.stdout.write("\n")
+
+if __name__ == "__main__":
+ main(sys.argv[1:] if len(sys.argv) > 1 else ["18.0.0.0/8-24"])
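
To make the expansion concrete: a hypothetical ROA prefix of 10.0.0.0/8 with maxLength 10 (written 10.0.0.0/8-10 on this script's command line) stands for every prefix from /8 through /10 that fits inside 10.0.0.0/8, i.e. one /8, two /9s and four /10s. The same expansion can be cross-checked with the stock ipaddress module:

    # Independent check of the maxLength expansion; 10.0.0.0/8-10 is an invented example.
    import ipaddress

    prefix, max_length = ipaddress.ip_network("10.0.0.0/8"), 10
    for plen in range(prefix.prefixlen, max_length + 1):
        for subnet in prefix.subnets(new_prefix=plen):
            print(subnet)
    # 10.0.0.0/8, 10.0.0.0/9, 10.128.0.0/9, 10.0.0.0/10, 10.64.0.0/10,
    # 10.128.0.0/10, 10.192.0.0/10 -- seven prefixes in all
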
diff --git a/potpourri/extract-key.py b/potpourri/extract-key.py
new file mode 100644
index 00000000..b85c3d55
--- /dev/null
+++ b/potpourri/extract-key.py
@@ -0,0 +1,64 @@
+# $Id$
+
+# Copyright (C) 2014 Dragon Research Labs ("DRL")
+# Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notices and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL
+# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR
+# ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
+# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA
+# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Extract a private key from rpkid's database.
+
+This is a debugging tool. rpkid goes to some trouble not to expose
+private keys, which is correct for normal operation, but for debugging
+it is occasionally useful to be able to extract the private key from
+MySQL. This script is just a convenience, it doesn't enable anything
+that couldn't be done via the mysql command line tool.
+
+While we're at this we also extract the corresponding certificate.
+"""
+
+import os
+import time
+import argparse
+import sys
+import MySQLdb
+import rpki.x509
+
+os.environ["TZ"] = "UTC"
+time.tzset()
+
+parser = argparse.ArgumentParser(description = __doc__)
+parser.add_argument("-s", "--self", required = True, help = "self handle")
+parser.add_argument("-b", "--bsc", required = True, help = "BSC handle")
+parser.add_argument("-u", "--user", required = True, help = "MySQL user name")
+parser.add_argument("-d", "--db", required = True, help = "MySQL database name")
+parser.add_argument("-p", "--password", required = True, help = "MySQL password")
+args = parser.parse_args()
+
+cur = MySQLdb.connect(user = args.user, db = args.db, passwd = args.password).cursor()
+
+cur.execute(
+ """
+ SELECT bsc.private_key_id, bsc.signing_cert
+ FROM bsc, self
+ WHERE self.self_handle = %s AND self.self_id = bsc.self_id AND bsc_handle = %s
+ """,
+ (args.self, args.bsc))
+
+key, cer = cur.fetchone()
+
+print rpki.x509.RSA(DER = key).get_PEM()
+
+if cer:
+ print rpki.x509.X509(DER = cer).get_PEM()
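
A hypothetical invocation (handles, database name and credentials are placeholders); the private key and, if present, the BSC's signing certificate come out as PEM on stdout:

    python extract-key.py --self alice --bsc bsc1 --user rpki --db rpkid --password secret > bsc1.pem
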
diff --git a/potpourri/fakerootd.py b/potpourri/fakerootd.py
new file mode 100644
index 00000000..6275a2a9
--- /dev/null
+++ b/potpourri/fakerootd.py
@@ -0,0 +1,50 @@
+# $Id$
+#
+# Copyright (C) 2011 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Hack to fake a catatonic rootd, for testing.
+"""
+
+import sys
+import socket
+import datetime
+import signal
+
+port = int(sys.argv[1]) if len(sys.argv) > 1 else 4405
+limit = int(sys.argv[2]) if len(sys.argv) > 2 else 5
+
+print "Listening on port", port
+
+s4 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+s4.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+s4.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+s4.bind(('', port))
+s4.listen(limit)
+
+s6 = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
+s6.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+s6.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+s6.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
+s6.bind(('::1', port))
+s6.listen(limit)
+
+print "Going to sleep at", datetime.datetime.utcnow()
+
+try:
+ signal.pause()
+except KeyboardInterrupt:
+ sys.exit(0)
+
diff --git a/potpourri/find-roa-expiration.py b/potpourri/find-roa-expiration.py
new file mode 100644
index 00000000..1401dc42
--- /dev/null
+++ b/potpourri/find-roa-expiration.py
@@ -0,0 +1,61 @@
+# $Id$
+#
+# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC")
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Look for ROAs for particular prefixes, like find_roa, then, for each
+ROA we find, dig out the expiration times of all the certificates
+involved in the authorization chain, all the way back to the root.
+"""
+
+import sys
+import subprocess
+import rpki.POW
+
+def filename_to_uri(filename):
+ if not filename.startswith(sys.argv[1]):
+ raise ValueError
+ return "rsync://" + filename[len(sys.argv[1]):].lstrip("/")
+
+def uri_to_filename(uri):
+ if not uri.startswith("rsync://"):
+ raise ValueError
+ return sys.argv[1] + "/" + uri[len("rsync://"):]
+
+def get_aia(x):
+ for i in xrange(x.countExtensions()):
+ ext = x.getExtension(i)
+ if ext[0] == "authorityInfoAccess":
+ return ext[2][ext[2].index("rsync://"):]
+ return None
+
+for line in subprocess.check_output(["find_roa"] + sys.argv[1:]).splitlines():
+
+ words = line.split()
+ fn = words.pop()
+ del words[-1]
+ print " ".join(words)
+
+ x = rpki.POW.CMS.derReadFile(fn).certs()[0]
+ uri = get_aia(x)
+ print x.getNotAfter(), filename_to_uri(fn)
+
+ while uri:
+ fn = uri_to_filename(uri)
+ x = rpki.POW.X509.derReadFile(fn)
+ print x.getNotAfter(), uri
+ uri = get_aia(x)
+
+ print
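
For readers without rpki.POW at hand, the same chain walk can be sketched with the cryptography package: read a certificate, note its notAfter, follow the caIssuers URI in its Authority Information Access extension up to the parent, and repeat until no AIA is left. This is an illustrative alternative, not what the script above uses; the uri_to_filename mapping is assumed to be the same rsync-tree convention as above:

    # Hedged sketch of the AIA chain walk using the cryptography package instead of rpki.POW.
    from cryptography import x509
    from cryptography.x509.oid import ExtensionOID, AuthorityInformationAccessOID

    def ca_issuers_uri(cert):
        """Return the caIssuers URI from the AIA extension, or None if absent."""
        try:
            aia = cert.extensions.get_extension_for_oid(
                ExtensionOID.AUTHORITY_INFORMATION_ACCESS).value
        except x509.ExtensionNotFound:
            return None
        for desc in aia:
            if desc.access_method == AuthorityInformationAccessOID.CA_ISSUERS:
                return desc.access_location.value
        return None

    def walk_chain(filename, uri_to_filename):
        while filename is not None:
            with open(filename, "rb") as f:
                cert = x509.load_der_x509_certificate(f.read())
            print("%s %s" % (cert.not_valid_after, filename))
            uri = ca_issuers_uri(cert)
            filename = uri_to_filename(uri) if uri else None
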
diff --git a/potpourri/format-application-x-rpki.py b/potpourri/format-application-x-rpki.py
new file mode 100644
index 00000000..184103f9
--- /dev/null
+++ b/potpourri/format-application-x-rpki.py
@@ -0,0 +1,132 @@
+# $Id$
+#
+# Copyright (C) 2014 Dragon Research Labs ("DRL")
+# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notices and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL
+# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR
+# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
+# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA
+# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Take the basic application/x-rpki messages that rpkid and friends
+log and translate them into a text version that's easier to search,
+without losing any of the original data. We use MH for the output
+format because nmh makes a handy viewer.
+"""
+
+import email.mime
+import email.mime.application
+import email.mime.text
+import email.mime.multipart
+import email.utils
+import email.encoders
+import mailbox
+import rpki.POW
+import lxml.etree
+import argparse
+import sys
+import base64
+
+parser = argparse.ArgumentParser(description = __doc__)
+parser.add_argument("-i", "--input", required = True,
+ help = "input Maildir")
+parser.add_argument("-m", "--mark", action = "store_true",
+ help = "mark seen messages")
+parser.add_argument("-k", "--kill", action = "store_true",
+ help = "kill seen messages")
+parser.add_argument("-o", "--output", required = True,
+ help = "output MH folder")
+parser.add_argument("-t", "--tag",
+ default = "{http://www.apnic.net/specs/rescerts/up-down/}message",
+ help = "XML namespace tag for an input message")
+parser.add_argument("-u", "--unseen", action = "store_true",
+ help = "only process unseen messages")
+args = parser.parse_args()
+
+def pprint_cert(b64):
+ return rpki.POW.X509.derRead(base64.b64decode(b64)).pprint()
+
+def up_down():
+ msg["X-RPKI-Up-Down-Type"] = xml.get("type")
+ msg["X-RPKI-Up-Down-Sender"] = xml.get("sender")
+ msg["X-RPKI-Up-Down-Recipient"] = xml.get("recipient")
+ msg["Subject"] = "Up-down %s %s => %s" % (xml.get("type"), xml.get("sender"), xml.get("recipient"))
+ for x in xml:
+ if x.tag.endswith("class"):
+ for y in x:
+ if y.tag.endswith("certificate") or y.tag.endswith("issuer"):
+ msg.attach(email.mime.text.MIMEText(pprint_cert(y.text)))
+
+def left_right():
+ msg["X-RPKI-Left-Right-Type"] = xml.get("type")
+ msg["Subject"] = "Left-right %s" % xml.get("type")
+
+def publication():
+ msg["X-RPKI-Left-Right-Type"] = xml.get("type")
+ msg["Subject"] = "Publication %s" % xml.get("type")
+
+dispatch = { "{http://www.apnic.net/specs/rescerts/up-down/}message" : up_down,
+ "{http://www.hactrn.net/uris/rpki/left-right-spec/}msg" : left_right,
+ "{http://www.hactrn.net/uris/rpki/publication-spec/}msg" : publication }
+
+def fix_headers():
+ if "X-RPKI-PID" in srcmsg or "X-RPKI-Object" in srcmsg:
+ msg["X-RPKI-PID"] = srcmsg["X-RPKI-PID"]
+ msg["X-RPKI-Object"] = srcmsg["X-RPKI-Object"]
+ else:
+ words = srcmsg["Subject"].split()
+ msg["X-RPKI-PID"] = words[1]
+ msg["X-RPKI-Object"] = " ".join(words[4:])
+
+destination = None
+source = None
+try:
+ destination = mailbox.MH(args.output, factory = None, create = True)
+ source = mailbox.Maildir(args.input, factory = None)
+
+ for srckey, srcmsg in source.iteritems():
+ if args.unseen and "S" in srcmsg.get_flags():
+ continue
+ assert not srcmsg.is_multipart() and srcmsg.get_content_type() == "application/x-rpki"
+ payload = srcmsg.get_payload(decode = True)
+ cms = rpki.POW.CMS.derRead(payload)
+ txt = cms.verify(rpki.POW.X509Store(), None, rpki.POW.CMS_NOCRL | rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | rpki.POW.CMS_NO_ATTR_VERIFY | rpki.POW.CMS_NO_CONTENT_VERIFY)
+ xml = lxml.etree.fromstring(txt)
+ tag = xml.tag
+ if args.tag and tag != args.tag:
+ continue
+ msg = email.mime.multipart.MIMEMultipart("related")
+ msg["X-RPKI-Tag"] = tag
+ for i in ("Date", "Message-ID", "X-RPKI-Timestamp"):
+ msg[i] = srcmsg[i]
+ fix_headers()
+ if tag in dispatch:
+ dispatch[tag]()
+ if "Subject" not in msg:
+ msg["Subject"] = srcmsg["Subject"]
+ msg.attach(email.mime.text.MIMEText(txt))
+ msg.attach(email.mime.application.MIMEApplication(payload, "x-rpki"))
+ msg.epilogue = "\n" # Force trailing newline
+ key = destination.add(msg)
+ print "Added", key
+ if args.kill:
+ del source[srckey]
+ elif args.mark:
+ srcmsg.set_subdir("cur")
+ srcmsg.add_flag("S")
+ source[srckey] = srcmsg
+
+finally:
+ if destination:
+ destination.close()
+ if source:
+ source.close()
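
A hypothetical run, pulling unseen messages out of a logging Maildir into an MH folder that nmh can then browse, and marking each message seen once copied (paths are illustrative):

    python format-application-x-rpki.py --input /var/rpki/rpkid-log-maildir \
        --output ~/Mail/rpki-log --unseen --mark
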
diff --git a/potpourri/gc_summary.awk b/potpourri/gc_summary.awk
new file mode 100644
index 00000000..b3b1bc6a
--- /dev/null
+++ b/potpourri/gc_summary.awk
@@ -0,0 +1,72 @@
+#!/usr/bin/awk -f
+
+# $Id$
+#
+# Copyright (C) 2010 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+# Use gnuplot to graph interesting data from gc_summary lines in rpkid logs.
+
+BEGIN {
+ target = ENVIRON["TARGET"] ? ENVIRON["TARGET"] : "tuple";
+ outtype = ENVIRON["OUTTYPE"] ? ENVIRON["OUTTYPE"] : "png";
+ outname = ENVIRON["OUTNAME"] ? ENVIRON["OUTNAME"] : "";
+ print "set xdata time";
+ print "set timefmt '%Y-%m-%dT%H:%M:%S'";
+ #print "set format x '%d%b'";
+ print "set format x '%T'";
+ print "set key right bottom";
+ if (outname) {
+ print "set terminal", outtype;
+ print "set output '" outname "." outtype "'";
+ print "set term png size 1024,1024";
+ }
+ if (ARGC <= 2) {
+ print "plot '-' using 1:2 with linespoints title 'rpkid use of", target, "objects'";
+ } else {
+ cmd = "plot '-' using 1:2 with linespoints title '" ARGV[1] "'";
+ for (i = 2; i < ARGC; i++)
+ cmd = cmd ", '-' using 1:2 with linespoints title '" ARGV[i] "'";
+ print cmd;
+ }
+}
+
+FILENAME != filename && filename {
+ print "e";
+}
+
+FILENAME != filename {
+ print "#", FILENAME
+ filename = FILENAME;
+ proc = "";
+}
+
+$6 == target && proc != $3 && proc {
+ print "";
+}
+
+$6 == target && proc != $3 {
+ proc = $3;
+}
+
+$6 == target {
+ print "#", $0;
+ print $1 "T" $2, $5;
+}
+
+END {
+ print "e";
+ if (!outname)
+ print "pause mouse any";
+}
diff --git a/potpourri/gc_summary.py b/potpourri/gc_summary.py
new file mode 100644
index 00000000..1f6987bf
--- /dev/null
+++ b/potpourri/gc_summary.py
@@ -0,0 +1,112 @@
+# $Id$
+#
+# Copyright (C) 2010 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+# Use gnuplot to graph interesting data from gc_summary lines in rpkid logs.
+
+import sys, os, time
+
+class datapoint(object):
+
+ outtype = os.getenv("OUTTYPE", "png")
+ outname = os.getenv("OUTNAME", "")
+ timefmt = os.getenv("TIMEFMT", "%T")
+ pretend = os.getenv("PRETEND_EVERYTHING_CHANGED", False)
+ threshold = int(os.getenv("THRESHOLD", "100"))
+
+ raw = []
+ filenames = []
+
+ def __init__(self, filename, timestamp, process, count, typesig, line):
+ self.filename = filename
+ self.timestamp = timestamp
+ self.process = process
+ self.count = count
+ self.typesig = typesig
+ self.line = line
+ self.key = "%s %s" % (filename, typesig)
+ self.raw.append(self)
+ if filename not in self.filenames:
+ self.filenames.append(filename)
+
+ def __cmp__(self, other):
+ c = cmp(self.key, other.key)
+ return c if c else cmp(self.timestamp, other.timestamp)
+
+ @classmethod
+ def plot(cls):
+
+ print "# [%s] Looking for interesting records" % time.strftime("%T")
+ changed = {}
+ for i in cls.raw:
+ if i.key not in changed:
+ changed[i.key] = set()
+ changed[i.key].add(i.count)
+ if cls.pretend:
+ changed = set(changed.iterkeys())
+ else:
+ changed = set(k for k, v in changed.iteritems() if max(v) - min(v) > cls.threshold)
+
+ if not changed:
+ print "# [%s] Apparently nothing worth reporting" % time.strftime("%T")
+ print "print 'Nothing to plot'"
+ return
+
+ print "# [%s] Header" % time.strftime("%T")
+ print "set xdata time"
+ print "set timefmt '%Y-%m-%dT%H:%M:%S'"
+ print "set format x '%s'" % cls.timefmt
+ print "set key right bottom"
+ if cls.outname:
+ print "set terminal", cls.outtype
+ print "set output '%s.%s'" % (cls.outname, cls.outtype)
+ print "set term png size 1024,1024"
+ print "plot", ", ".join("'-' using 1:2 with linespoints title '%s'" % i for i in changed)
+
+ print "# [%s] Sorting" % time.strftime("%T")
+ cls.raw.sort()
+
+ key = None
+ proc = None
+ for i in cls.raw:
+ if i.key not in changed:
+ continue
+ if key is not None and i.key != key:
+ print "e"
+ elif proc is not None and i.process != proc:
+ print ""
+ key = i.key
+ proc = i.process
+ print "#", i.key, i.line
+ print i.timestamp, i.count
+ print "e"
+ if not cls.outname:
+ print "pause mouse any"
+
+for filename in sys.argv[1:]:
+ print "# [%s] Reading %s" % (time.strftime("%T"), filename)
+ for line in open(filename):
+ if "gc_summary:" in line:
+ word = line.split(None, 6)
+ if word[4].isdigit() and word[5].startswith("(") and word[5].endswith(")"):
+ datapoint(filename = filename,
+ timestamp = word[0] + "T" + word[1],
+ process = word[2],
+ count = int(word[4]),
+ typesig = word[5],
+ line = line.strip())
+
+print "# [%s] Plotting" % time.strftime("%T")
+datapoint.plot()
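
The output is a gnuplot script, so the usual pattern is to pipe it straight into gnuplot; OUTNAME, OUTTYPE, TIMEFMT, THRESHOLD and PRETEND_EVERYTHING_CHANGED are picked up from the environment, as read above. A hypothetical run (the log file name is illustrative):

    OUTNAME=gc-summary OUTTYPE=png THRESHOLD=100 python gc_summary.py /var/rpki/rpkid.log | gnuplot

Without OUTNAME the script ends with "pause mouse any", which keeps an interactive gnuplot window open instead of writing an image file.
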
diff --git a/potpourri/generate-ripe-root-cert.py b/potpourri/generate-ripe-root-cert.py
new file mode 100644
index 00000000..3407bc51
--- /dev/null
+++ b/potpourri/generate-ripe-root-cert.py
@@ -0,0 +1,57 @@
+# $Id$
+#
+# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Parse IANA XML data and write out just what we need to generate a root
+cert for Pseudo-RIPE.
+"""
+
+import sys
+import lxml.etree
+from rpki.csv_utils import csv_writer
+
+def iterate_xml(filename, tag):
+ return lxml.etree.parse(filename).getroot().getiterator(tag)
+
+def ns(tag):
+ return "{http://www.iana.org/assignments}" + tag
+
+tag_description = ns("description")
+tag_designation = ns("designation")
+tag_record = ns("record")
+tag_number = ns("number")
+tag_prefix = ns("prefix")
+
+asns = csv_writer("asns.csv")
+prefixes = csv_writer("prefixes.csv")
+
+for record in iterate_xml("as-numbers.xml", tag_record):
+ if record.findtext(tag_description) == "Assigned by RIPE NCC":
+ asns.writerow(("RIPE", record.findtext(tag_number)))
+
+for record in iterate_xml("ipv4-address-space.xml", tag_record):
+ if record.findtext(tag_designation) in ("RIPE NCC", "Administered by RIPE NCC"):
+ prefix = record.findtext(tag_prefix)
+ p, l = prefix.split("/")
+ assert l == "8", "Violated /8 assumption: %r" % prefix
+ prefixes.writerow(("RIPE", "%d.0.0.0/8" % int(p)))
+
+for record in iterate_xml("ipv6-unicast-address-assignments.xml", tag_record):
+ if record.findtext(tag_description) == "RIPE NCC":
+ prefixes.writerow(("RIPE", record.findtext(tag_prefix)))
+
+asns.close()
+prefixes.close()
diff --git a/potpourri/gski.py b/potpourri/gski.py
new file mode 100644
index 00000000..083a59c8
--- /dev/null
+++ b/potpourri/gski.py
@@ -0,0 +1,21 @@
+# $Id$
+
+# Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+import rpki.x509, sys
+
+for file in sys.argv[1:]:
+ cert = rpki.x509.X509(Auto_file = file)
+ print cert.gSKI(), cert.hSKI(), file
diff --git a/potpourri/guess-roas-from-routeviews.py b/potpourri/guess-roas-from-routeviews.py
new file mode 100644
index 00000000..d8fb9c4c
--- /dev/null
+++ b/potpourri/guess-roas-from-routeviews.py
@@ -0,0 +1,63 @@
+# $Id$
+#
+# Copyright (C) 2009 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Pull RFC 3779 resources from a cert, attempt to mine routeviews (via
+DNS, using the dnspython toolkit) for what the ROAs might look like
+for the addresses found in the cert.
+
+This doesn't handle IPv6, because neither, apparently, does the
+routeviews DNS interface. Oh well.
+
+NB: this is wild-assed guessing at best. Even if the routeviews data
+were signed, which it is not, you have no particular reason to believe
+it.  Do not use the output of this script in production.  Sanity check.
+Beware of dog. If you issue ROAs using this script and your wallpaper
+peels, your cat explodes, or your children turn blue, it's your own
+fault for using this script. You have been warned.
+"""
+
+import sys
+import dns.resolver
+import rpki.x509
+from rpki.ipaddrs import v4addr
+from rpki.resource_set import roa_prefix_ipv4, resource_set_ipv4, resource_range_ipv4
+
+roas = []
+
+for filename in sys.argv[1:]:
+ resources = rpki.x509.X509(Auto_file = filename).get_3779resources().v4
+
+ while resources:
+ labels = str(resources[0].min).split(".")
+ labels.reverse()
+
+ try:
+ for answer in dns.resolver.query(".".join(labels) + ".asn.routeviews.org", "txt"):
+ asn, prefix, prefixlen = answer.strings
+ roa_prefix = roa_prefix_ipv4(v4addr(prefix), long(prefixlen))
+ roa = "%s\t%s\t%s" % (roa_prefix, long(asn), filename)
+ if roa not in roas:
+ roas.append(roa)
+ resources = resources.difference(resource_set_ipv4([roa_prefix.to_resource_range()]))
+
+ except dns.resolver.NXDOMAIN:
+ resources = resources.difference(resource_set_ipv4([resource_range_ipv4(resources[0].min, v4addr(resources[0].min + 256))]))
+
+roas.sort()
+
+for roa in roas:
+ print roa
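
The DNS trick above works like reverse DNS: the first address of whatever IPv4 space remains is octet-reversed and queried as a TXT record under asn.routeviews.org, and each answer carries the origin ASN, the covering prefix and its length. A minimal standalone sketch of just that lookup (the address is a documentation example, so a real query for it would most likely come back NXDOMAIN, which the script above treats as "skip ahead 256 addresses"):

    # Minimal sketch of the routeviews TXT lookup; 192.0.2.1 is only a documentation address.
    import dns.resolver   # newer dnspython spells this call dns.resolver.resolve

    addr = "192.0.2.1"
    name = ".".join(reversed(addr.split("."))) + ".asn.routeviews.org"
    for answer in dns.resolver.query(name, "txt"):
        asn, prefix, prefixlen = answer.strings
        print("AS%s originates %s/%s" % (asn, prefix, prefixlen))
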
diff --git a/potpourri/iana-to-csv.py b/potpourri/iana-to-csv.py
new file mode 100644
index 00000000..f803a21e
--- /dev/null
+++ b/potpourri/iana-to-csv.py
@@ -0,0 +1,85 @@
+# $Id$
+#
+# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Parse IANA XML data.
+"""
+
+import sys
+import lxml.etree
+from rpki.csv_utils import csv_reader, csv_writer
+from rpki.resource_set import resource_bag
+
+def iterate_xml(filename, tag):
+ return lxml.etree.parse(filename).getroot().getiterator(tag)
+
+def ns(tag):
+ return "{http://www.iana.org/assignments}" + tag
+
+tag_description = ns("description")
+tag_designation = ns("designation")
+tag_record = ns("record")
+tag_number = ns("number")
+tag_prefix = ns("prefix")
+tag_status = ns("status")
+
+handles = {}
+rirs = { "legacy" : resource_bag() }
+
+for rir in ("AfriNIC", "APNIC", "ARIN", "LACNIC", "RIPE NCC"):
+ handle = rir.split()[0].lower()
+ handles[rir] = handles["Assigned by %s" % rir] = handles["Administered by %s" % rir] = handle
+ rirs[handle] = resource_bag()
+
+asns = csv_writer("asns.csv")
+prefixes = csv_writer("prefixes.csv")
+
+for record in iterate_xml("as-numbers.xml", tag_record):
+ description = record.findtext(tag_description)
+ if description in handles:
+ asns.writerow((handles[description], record.findtext(tag_number)))
+
+for record in iterate_xml("ipv4-address-space.xml", tag_record):
+ designation = record.findtext(tag_designation)
+ if record.findtext(tag_status) != "RESERVED":
+ prefix, prefixlen = [int(i) for i in record.findtext(tag_prefix).split("/")]
+ if prefixlen != 8:
+ raise ValueError("%s violated /8 assumption" % record.findtext(tag_prefix))
+ rirs[handles.get(designation, "legacy")] |= resource_bag.from_str("%d.0.0.0/8" % prefix)
+
+for record in iterate_xml("ipv6-unicast-address-assignments.xml", tag_record):
+ description = record.findtext(tag_description)
+  if description in handles:
+ rirs[handles[description]] |= resource_bag.from_str(record.findtext(tag_prefix))
+
+erx = list(csv_reader("erx.csv"))
+assert all(r in rirs for r, p in erx)
+
+erx_overrides = resource_bag.from_str(",".join(p for r, p in erx), allow_overlap = True)
+
+for rir in rirs:
+ if rir != "legacy":
+ rirs[rir] -= erx_overrides
+ rirs[rir] |= resource_bag.from_str(",".join(p for r, p in erx if r == rir), allow_overlap = True)
+
+for rir, bag in rirs.iteritems():
+ for p in bag.v4:
+ prefixes.writerow((rir, p))
+ for p in bag.v6:
+ prefixes.writerow((rir, p))
+
+asns.close()
+prefixes.close()
diff --git a/potpourri/missing-oids.py b/potpourri/missing-oids.py
new file mode 100644
index 00000000..16316eac
--- /dev/null
+++ b/potpourri/missing-oids.py
@@ -0,0 +1,38 @@
+# $Id$
+#
+# Copyright (C) 2008 American Registry for Internet Numbers ("ARIN")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Figure out what OIDs from rpki.oids are missing from dumpasn1's database.
+"""
+
+import rpki.POW.pkix, rpki.oids
+
+need_header = True
+
+for oid, name in rpki.oids.oid2name.items():
+ try:
+ rpki.POW.pkix.oid2obj(oid)
+ except:
+ o = rpki.POW.pkix.Oid()
+ o.set(oid)
+ if need_header:
+ print
+ print "# Local additions"
+ need_header = False
+ print
+ print "OID =", " ".join(("%02X" % ord(c)) for c in o.toString())
+ print "Comment = RPKI project"
+ print "Description =", name, "(" + " ".join((str(i) for i in oid)) + ")"
diff --git a/potpourri/object-dates.py b/potpourri/object-dates.py
new file mode 100644
index 00000000..b99441d6
--- /dev/null
+++ b/potpourri/object-dates.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# $Id$
+
+# Extract notBefore, notAfter, thisUpdate and nextUpdate dates from
+# RPKI objects.
+
+# Copyright (C) 2013--2014 Dragon Research Labs ("DRL")
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+import sys
+import os.path
+import rpki.POW
+
+extract_flags = (rpki.POW.CMS_NOCRL |
+ rpki.POW.CMS_NO_SIGNER_CERT_VERIFY |
+ rpki.POW.CMS_NO_ATTR_VERIFY |
+ rpki.POW.CMS_NO_CONTENT_VERIFY)
+
+def get_mft(fn):
+ cms = rpki.POW.Manifest.derReadFile(fn)
+ cms.verify(rpki.POW.X509Store(), None, extract_flags)
+ return cms, cms.certs()[0]
+
+def get_roa(fn):
+ return None, rpki.POW.CMS.derReadFile(fn).certs()[0]
+
+def get_gbr(fn):
+ return None, rpki.POW.CMS.derReadFile(fn).certs()[0]
+
+def get_crl(fn):
+ return rpki.POW.CRL.derReadFile(fn), None
+
+def get_cer(fn):
+ return None, rpki.POW.X509.derReadFile(fn)
+
+dispatch = dict(mft = get_mft,
+ roa = get_roa,
+ gbr = get_gbr,
+ crl = get_crl,
+ cer = get_cer)
+
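+# Usage sketch (filenames below are hypothetical); the file extension selects
+# the parser from the dispatch table above:
+#
+# python object-dates.py foo.cer bar.mft baz.roa quux.crl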
+for fn in sys.argv[1:]:
+ obj, cer = dispatch[os.path.splitext(fn)[1][1:]](fn)
+ print fn
+ if cer is not None:
+ print "notBefore: ", cer.getNotBefore()
+ if obj is not None:
+ print "thisUpdate:", obj.getThisUpdate()
+ print "nextUpdate:", obj.getNextUpdate()
+ if cer is not None:
+ print "notAfter: ", cer.getNotAfter()
+ print
diff --git a/potpourri/pcap-to-xml.sh b/potpourri/pcap-to-xml.sh
new file mode 100644
index 00000000..73c30880
--- /dev/null
+++ b/potpourri/pcap-to-xml.sh
@@ -0,0 +1,36 @@
+#!/bin/sh -
+# $Id$
+#
+# Copyright (C) 2011 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+# Zeroeth cut at a packet decoder for RPKI up-down / left-right /
+# publication traffic captured off the wire. Needs work, not suitable
+# for general use, depends on a bunch of external programs that I
+# happen to have installed...but has been useful to me.
+
+for p in *.pcap
+do
+ tcptrace -e $p
+ for i in *.dat
+ do
+ j=${i%_contents.dat}
+ sed '1,/^ $/d' $i >$j.der
+ openssl cms -verify -noverify -inform DER -in $j.der | xmlindent > $j.xml
+ k=$(dumpasn1 -a $j.der 2>/dev/null | awk 'BEGIN {FS = "[ \t/:]+"} /signingTime/ {nr = NR + 2} NR == nr {print $6 "-" $5 "-" $4 "T" $7 ":" $8 ":" $9 "Z"}')
+ mv $j.der $k.$j.der
+ mv $j.xml $k.$j.xml
+ rm $i
+ done
+done
diff --git a/potpourri/print-profile.py b/potpourri/print-profile.py
new file mode 100644
index 00000000..081d2602
--- /dev/null
+++ b/potpourri/print-profile.py
@@ -0,0 +1,20 @@
+# $Id$
+#
+# Copyright (C) 2010 Internet Systems Consortium, Inc. ("ISC")
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+import pstats, glob
+
+for f in glob.iglob("*.prof"):
+ pstats.Stats(f).sort_stats("time").print_stats(50)
diff --git a/potpourri/rcynic-diff.py b/potpourri/rcynic-diff.py
new file mode 100644
index 00000000..327a7b71
--- /dev/null
+++ b/potpourri/rcynic-diff.py
@@ -0,0 +1,114 @@
+# $Id$
+#
+# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC")
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Diff a series of rcynic.xml files, sort of.
+"""
+
+import sys
+
+try:
+ from lxml.etree import ElementTree
+except ImportError:
+ from xml.etree.ElementTree import ElementTree
+
+show_backup_generation = False
+show_rsync_transfer = False
+
+class Object(object):
+
+ def __init__(self, session, uri, generation):
+ self.session = session
+ self.uri = uri
+ self.generation = generation
+ self.labels = []
+
+ def add(self, label):
+ self.labels.append(label)
+
+ def __cmp__(self, other):
+ return cmp(self.labels, other.labels)
+
+def show(old = None, new = None):
+ assert old is not None or new is not None
+ assert old is None or new is None or old.uri == new.uri
+ if old is None:
+ obj = new
+ labels = ["+" + label for label in new.labels]
+ elif new is None:
+ obj = old
+ labels = ["-" + label for label in old.labels]
+ else:
+ obj = new
+ labels = []
+ for label in new.session.labels:
+ if label in new.labels and label in old.labels:
+ labels.append(label)
+ elif label in new.labels:
+ labels.append("+" + label)
+ elif label in old.labels:
+ labels.append("-" + label)
+ labels = " ".join(labels)
+ if show_backup_generation:
+ print " ", obj.uri, obj.generation, labels
+ else:
+ print " ", obj.uri, labels
+
+class Session(dict):
+
+ def __init__(self, name):
+ self.name = name
+ tree = ElementTree(file = name)
+ self.labels = [elt.tag.strip() for elt in tree.find("labels")]
+ for elt in tree.findall("validation_status"):
+ generation = elt.get("generation")
+ status = elt.get("status")
+ uri = elt.text.strip()
+ if not show_rsync_transfer and status.startswith("rsync_transfer_"):
+ continue
+ if show_backup_generation:
+ key = (uri, generation)
+ elif generation == "backup":
+ continue
+ else:
+ key = uri
+ if key not in self:
+ self[key] = Object(self, uri, generation)
+ self[key].add(status)
+
+old_db = new_db = None
+
+for arg in sys.argv[1:]:
+
+ old_db = new_db
+ new_db = Session(arg)
+
+ if old_db is None:
+ continue
+
+ only_old = set(old_db) - set(new_db)
+ only_new = set(new_db) - set(old_db)
+ changed = set(key for key in (set(old_db) & set(new_db)) if old_db[key] != new_db[key])
+
+ if only_old or changed or only_new:
+ print "Comparing", old_db.name, "with", new_db.name
+ for key in sorted(only_old):
+ show(old = old_db[key])
+ for key in sorted(changed):
+ show(old = old_db[key], new = new_db[key])
+ for key in sorted(only_new):
+ show(new = new_db[key])
+ print
diff --git a/potpourri/rcynic-lta b/potpourri/rcynic-lta
new file mode 100755
index 00000000..4c55db92
--- /dev/null
+++ b/potpourri/rcynic-lta
@@ -0,0 +1,1055 @@
+#!/usr/local/bin/python
+
+# $Id$
+
+# Copyright (C) 2013 Dragon Research Labs ("DRL")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+########################################################################
+#
+# DANGER WILL ROBINSON
+#
+# This is a PROTOTYPE of a local trust anchor mechanism. At the
+# moment, it DOES NOT WORK by any sane standard of measurement. It
+# produces output, but there is no particular reason to believe said
+# output is useful, and fairly good reason to believe that it is not.
+#
+# With luck, this may eventually mutate into something useful. For
+# now, just leave it alone unless you really know what you are doing,
+# in which case, on your head be it.
+#
+# YOU HAVE BEEN WARNED
+#
+########################################################################
+
+import os
+import sys
+import yaml
+import glob
+import time
+import shutil
+import base64
+import socket
+import sqlite3
+import weakref
+import rpki.POW
+import rpki.x509
+import rpki.sundial
+import rpki.resource_set
+
+# Teach SQLite3 about our data types.
+
+sqlite3.register_adapter(rpki.POW.IPAddress,
+ lambda x: buffer("_" + x.toBytes()))
+
+sqlite3.register_converter("RangeVal",
+ lambda s: long(s) if s.isdigit() else rpki.POW.IPAddress.fromBytes(s[1:]))
+
+sqlite3.register_adapter(rpki.x509.X501DN, str)
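+
+# A RangeVal column holds either an ASN or an IP address: ASNs reach the
+# converter as decimal digits, addresses as a "_" marker byte followed by the
+# raw address bytes, which is how the converter above tells the two apart.
+#
+# Illustrative round trip (not part of this script), assuming a connection
+# opened with detect_types = sqlite3.PARSE_DECLTYPES as RPDB does below:
+#
+# db.execute("CREATE TABLE t (v RangeVal)")
+# db.execute("INSERT INTO t VALUES (?)", (rpki.POW.IPAddress("10.0.0.1"),))
+# v, = db.execute("SELECT v FROM t").fetchone() # back as an rpki.POW.IPAddress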
+
+
+class main(object):
+
+ tal_directory = None
+ constraints = None
+ rcynic_input = None
+ rcynic_output = None
+ tals = None
+ keyfile = None
+
+ ltakey = None
+ ltacer = None
+
+ ltauri = "rsync://localhost/lta"
+ ltasia = ltauri + "/"
+ ltaaia = ltauri + ".cer"
+ ltamft = ltauri + "/lta.mft"
+ ltacrl = ltauri + "/lta.crl"
+
+ cer_delta = rpki.sundial.timedelta(days = 7)
+ crl_delta = rpki.sundial.timedelta(hours = 1)
+
+ all_mentioned_resources = rpki.resource_set.resource_bag()
+
+
+ def __init__(self):
+ print "Parsing YAML"
+ self.parse_yaml()
+ print
+ print "Parsing TALs"
+ self.parse_tals()
+ print
+ print "Creating DB"
+ self.rpdb = RPDB(self.db_name)
+ print
+ print "Creating CA"
+ self.create_ca()
+ print
+ print "Loading DB"
+ self.rpdb.load(self.rcynic_input)
+ print
+ print "Processing adds and drops"
+ self.process_add_drop()
+ print
+ print "Processing deletions"
+ self.process_constraint_deletions()
+ print
+ print "Re-parenting TAs"
+ self.re_parent_tas()
+ print
+ print "Generating CRL and manifest"
+ self.generate_crl_and_manifest()
+ print
+ print "Committing final changes to DB"
+ self.rpdb.commit()
+ print
+ print "Dumping para-objects"
+ self.rpdb.dump_paras(self.rcynic_output)
+ print
+ print "Closing DB"
+ self.rpdb.close()
+
+
+ def create_ca(self):
+ self.serial = Serial()
+ self.ltakey = rpki.x509.RSA.generate(quiet = True)
+ cer = OutgoingX509.self_certify(
+ cn = "%s LTA Root Certificate" % socket.getfqdn(),
+ keypair = self.ltakey,
+ subject_key = self.ltakey.get_RSApublic(),
+ serial = self.serial(),
+ sia = (self.ltasia, self.ltamft, None),
+ notAfter = rpki.sundial.now() + self.cer_delta,
+ resources = rpki.resource_set.resource_bag.from_str("0-4294967295,0.0.0.0/0,::/0"))
+ subject_id = self.rpdb.find_keyname(cer.getSubject(), cer.get_SKI())
+ self.rpdb.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri, key) "
+ "VALUES (?, 'cer', ?, ?, ?, ?)",
+ (buffer(cer.get_DER()), subject_id, subject_id, self.ltaaia,
+ buffer(self.ltakey.get_DER())))
+ self.ltacer = self.rpdb.find_outgoing_by_id(self.rpdb.cur.lastrowid)
+
+
+ def parse_yaml(self, fn = "rcynic-lta.yaml"):
+ y = yaml.safe_load(open(fn, "r"))
+ self.db_name = y["db-name"]
+ self.tal_directory = y["tal-directory"]
+ self.rcynic_input = y["rcynic-input"]
+ self.rcynic_output = y["rcynic-output"]
+ self.keyfile = y["keyfile"]
+ self.constraints = [Constraint(yc) for yc in y["constraints"]]
+
+
+ def parse_tals(self):
+ self.tals = {}
+ for fn in glob.iglob(os.path.join(self.tal_directory, "*.tal")):
+ with open(fn, "r") as f:
+ uri = f.readline().strip()
+ key = rpki.POW.Asymmetric.derReadPublic(base64.b64decode(f.read()))
+ self.tals[uri] = key
+
+
+ @staticmethod
+ def show_candidates(constraint, candidates):
+ print
+ print "Constraint:", repr(constraint)
+ print "Resources: ", constraint.mentioned_resources
+ for i, candidate in enumerate(candidates):
+ print " Candidate #%d id %d depth %d name %s uri %s" % (
+ i, candidate.rowid,
+ candidate.depth,
+ candidate.subject_name,
+ candidate.uri)
+ if constraint.mentioned_resources <= candidate.resources:
+ print " Matched"
+ #print " Constraint resources:", constraint.mentioned_resources
+ #print " Candidate resources: ", candidate.resources
+ break
+ else:
+ print " No match"
+
+
+ def process_add_drop(self):
+ #
+ # We probably need to create the output root before running this,
+ # otherwise there's a chance that an "add" constraint will yield
+ # no viable candidate parent. Not likely to happen with current
+ # test setup where several of our roots claim 0/0.
+ #
+ for constraint in self.constraints:
+ candidates = self.rpdb.find_by_resource_bag(constraint.mentioned_resources)
+ candidates.sort(reverse = True, key = lambda candidate: candidate.depth)
+ #self.show_candidates(constraint, candidates)
+ constraint.drop(candidates)
+ constraint.add(candidates)
+
+
+ def process_constraint_deletions(self):
+ for obj in self.rpdb.find_by_resource_bag(self.all_mentioned_resources):
+ self.add_para(obj, obj.resources - self.all_mentioned_resources)
+
+
+ def re_parent_tas(self):
+ for uri, key in self.tals.iteritems():
+ for ta in self.rpdb.find_by_ski_or_uri(key.calculateSKI(), uri):
+ if ta.para_obj is None:
+ self.add_para(ta, ta.resources - self.all_mentioned_resources)
+
+
+ def add_para(self, obj, resources):
+ return self.rpdb.add_para(
+ obj = obj,
+ resources = resources,
+ serial = self.serial,
+ ltacer = self.ltacer,
+ ltasia = self.ltasia,
+ ltaaia = self.ltaaia,
+ ltamft = self.ltamft,
+ ltacrl = self.ltacrl,
+ ltakey = self.ltakey)
+
+
+ def generate_crl_and_manifest(self):
+ thisUpdate = rpki.sundial.now()
+ nextUpdate = thisUpdate + self.crl_delta
+ serial = self.serial()
+ issuer = self.ltacer.getSubject()
+ aki = buffer(self.ltacer.get_SKI())
+
+ crl = OutgoingCRL.generate(
+ keypair = self.ltakey,
+ issuer = self.ltacer,
+ serial = serial,
+ thisUpdate = thisUpdate,
+ nextUpdate = nextUpdate,
+ revokedCertificates = ())
+
+ issuer_id = self.rpdb.find_keyname(issuer, aki)
+
+ self.rpdb.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri) "
+ "VALUES (?, 'crl', NULL, ?, ?)",
+ (buffer(crl.get_DER()), issuer_id, self.ltacrl))
+ crl = self.rpdb.find_outgoing_by_id(self.rpdb.cur.lastrowid)
+
+ key = rpki.x509.RSA.generate(quiet = True)
+
+ cer = self.ltacer.issue(
+ keypair = self.ltakey,
+ subject_key = key.get_RSApublic(),
+ serial = serial,
+ sia = (None, None, self.ltamft),
+ aia = self.ltaaia,
+ crldp = self.ltacrl,
+ resources = rpki.resource_set.resource_bag.from_inheritance(),
+ notAfter = self.ltacer.getNotAfter(),
+ is_ca = False)
+
+ # Temporary kludge, need more general solution but that requires
+ # more refactoring than I feel like doing this late in the day.
+ #
+ names_and_objs = [(uri, OutgoingObject.create(fn2 = fn2, der = der, uri = uri,
+ rpdb = None, rowid = None,
+ subject_id = None, issuer_id = None))
+ for fn2, der, uri in
+ self.rpdb.cur.execute("SELECT fn2, der, uri FROM outgoing WHERE issuer = ?",
+ (self.ltacer.rowid,))]
+
+ mft = OutgoingSignedManifest.build(
+ serial = serial,
+ thisUpdate = thisUpdate,
+ nextUpdate = nextUpdate,
+ names_and_objs = names_and_objs,
+ keypair = key,
+ certs = cer)
+
+ subject_id = self.rpdb.find_keyname(cer.getSubject(), cer.get_SKI())
+
+ self.rpdb.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri, key) "
+ "VALUES (?, 'mft', ?, ?, ?, ?)",
+ (buffer(mft.get_DER()), subject_id, issuer_id, self.ltamft, buffer(key.get_DER())))
+
+
+ @staticmethod
+ def parse_xki(s):
+ """
+ Parse text form of an SKI or AKI. We accept two encodings:
+ colon-delimited hexadecimal, and URL-safe Base64. The former is
+ what OpenSSL prints in its text representation of SKI and AKI
+ extensions; the latter is the g(SKI) value that some RPKI CA engines
+ (including rpkid) use when constructing filenames.
+
+ In either case, we check that the decoded result contains the right
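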
+ number of octets to be a SHA-1 hash.
+ """
+
+ if ":" in s:
+ b = "".join(chr(int(c, 16)) for c in s.split(":"))
+ else:
+ b = base64.urlsafe_b64decode(s + ("=" * (4 - len(s) % 4)))
+ if len(b) != 20:
+ raise RuntimeError("Bad length for SHA1 xKI value: %r" % s)
+ return b
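+
+ # Illustrative sketch (not part of the original tool): both accepted
+ # encodings of the same SKI decode to the same 20-octet string, e.g.
+ #
+ # hex_form = ":".join(["01", "23", "45", "67", "89"] * 4)
+ # b64_form = base64.urlsafe_b64encode(main.parse_xki(hex_form)).rstrip("=")
+ # assert main.parse_xki(b64_form) == main.parse_xki(hex_form)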
+
+
+
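+# Serial numbers: the high 32 bits come from the wall-clock time at startup,
+# the low 32 bits from a per-run counter, so successive runs normally issue
+# strictly increasing serials.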
+class Serial(object):
+
+ def __init__(self):
+ self.value = long(time.time()) << 32
+
+ def __call__(self):
+ self.value += 1
+ return self.value
+
+
+class ConstrainedObject(object):
+ # I keep expecting the classes derived from this to have some common
+ # methods, but so far it hasn't happened. Clean up eventually if not.
+ pass
+
+class ConstrainedROA(ConstrainedObject):
+
+ def __init__(self, constraint, y):
+ self.constraint = constraint
+ self.asn = long(y["asn"]) if y is not None else None
+ self.maxlen = long(y["maxlen"]) if y is not None and "maxlen" in y else None
+
+ def drop(self, candidates):
+ for candidate in candidates:
+ if isinstance(candidate, IncomingROA) and \
+ self.constraint.mentioned_resources == candidate.resources and \
+ (self.asn is None or self.asn == candidate.get_POW().getASID()):
+ print "Dropping ROA %r" % candidate
+ candidate.disposition = "delete"
+
+ def add(self, candidates):
+ assert self.asn is not None
+ for candidate in candidates:
+ if isinstance(candidate, IncomingX509) and self.constraint.mentioned_resources <= candidate.resources:
+ print "Should add ROA %s %s\nunder candidate %s (depth %s resources %s)" % (
+ self.asn, self.constraint.prefixes, candidate.subject_name, candidate.depth, candidate.resources)
+ break
+
+class ConstrainedGBR(ConstrainedObject):
+
+ def __init__(self, constraint, y):
+ self.constraint = constraint
+ self.vcard = y
+
+ def drop(self, candidates):
+ for candidate in candidates:
+ if isinstance(candidate, IncomingX509) and self.constraint.mentioned_resources == candidate.resources:
+ print "Dropping GBRs directly under %r" % candidate
+ for gbr in candidate.find_children("gbr"):
+ print "Dropping GBR %r" % gbr
+ gbr.disposition = "delete"
+
+ def add(self, candidates):
+ assert self.vcard is not None
+ for candidate in candidates:
+ if isinstance(candidate, IncomingX509) and self.constraint.mentioned_resources <= candidate.resources:
+ print "Should add GBR\n%s\nunder candidate %s (depth %s resources %s)" % (
+ "\n".join((" " * 4) + line for line in self.vcard.splitlines()),
+ candidate.subject_name, candidate.depth, candidate.resources)
+ break
+
+class ConstrainedRTR(ConstrainedObject):
+
+ def __init__(self, constraint, y):
+ self.constraint = constraint
+ self.key = y["key"] if y is not None else None
+ self.subject = y["subject"] if y is not None else None
+
+ def add(self, candidates):
+ raise NotImplementedError
+
+ def drop(self, candidates):
+ for candidate in candidates:
+ if isinstance(candidate, IncomingX509) and not candidate.is_ca and \
+ self.constraint.mentioned_resources == candidate.resources and \
+ (self.subject is None or candidate.getSubject() == self.subject):
+ print "Dropping RTR certificate %r" % candidate
+ candidate.disposition = "delete"
+
+class Constraint(object):
+
+ dispatch = dict(roa = ConstrainedROA,
+ gbr = ConstrainedGBR,
+ rtr = ConstrainedRTR)
+
+ def __init__(self, y):
+ self.y = y # Mostly for debugging. I think.
+ self.prefixes = rpki.resource_set.resource_bag.from_str(str(y.get("prefix", "")))
+ self.asns = rpki.resource_set.resource_bag.from_str(str(y.get("asn", "")))
+ self.init_drops(y.get("drop", ()))
+ self.init_adds( y.get("add", ()))
+
+ def init_drops(self, drops):
+ if drops == "all":
+ self.drops = tuple(d(self, None) for d in self.dispatch.itervalues())
+ else:
+ dd = []
+ for d in (drops if isinstance(drops, (list, tuple)) else [drops]):
+ if isinstance(d, str):
+ dd.append(self.dispatch[d[:-1]](self, None))
+ elif isinstance(d, dict) and len(d) == 1:
+ dd.append(self.dispatch[d.keys()[0]](self, d.values()[0]))
+ else:
+ raise ValueError("Unexpected drop clause " + repr(drops))
+ self.drops = tuple(dd)
+
+ def init_adds(self, adds):
+ if not all(isinstance(a, dict) and len(a) == 1 for a in adds):
+ raise ValueError("Expected list of single-entry mappings, got " + repr(adds))
+ self.adds = tuple(self.dispatch[a.keys()[0]](self, a.values()[0]) for a in adds)
+
+ def drop(self, candidates):
+ for d in self.drops:
+ d.drop(candidates)
+
+ def add(self, candidates):
+ for a in self.adds:
+ a.add(candidates)
+
+ def __repr__(self):
+ return "<%s:%s %r>" % (self.__class__.__module__, self.__class__.__name__, self.y)
+
+ @property
+ def mentioned_resources(self):
+ return self.prefixes | self.asns
+
+
+class BaseObject(object):
+ """
+ Mixin to add some SQL-related methods to classes derived from
+ rpki.x509.DER_object.
+ """
+
+ _rpdb = None
+ _rowid = None
+ _fn2 = None
+ _fn2map = None
+ _uri = None
+ _subject_id = None
+ _issuer_id = None
+
+ @property
+ def rowid(self):
+ return self._rowid
+
+ @property
+ def para_resources(self):
+ return self.resources if self.para_obj is None else self.para_obj.resources
+
+ @property
+ def fn2(self):
+ return self._fn2
+
+ @property
+ def uri(self):
+ return self._uri
+
+ @classmethod
+ def setfn2map(cls, **map):
+ cls._fn2map = map
+ for k, v in map.iteritems():
+ v._fn2 = k
+
+ @classmethod
+ def create(cls, rpdb, rowid, fn2, der, uri, subject_id, issuer_id):
+ self = cls._fn2map[fn2]()
+ if der is not None:
+ self.set(DER = der)
+ self._rpdb = rpdb
+ self._rowid = rowid
+ self._uri = uri
+ self._subject_id = subject_id
+ self._issuer_id = issuer_id
+ return self
+
+ @property
+ def subject_id(self):
+ return self._subject_id
+
+ @property
+ def subject_name(self):
+ return self._rpdb.find_keyname_by_id(self._subject_id)[0]
+
+ @property
+ def issuer_id(self):
+ return self._issuer_id
+
+ @property
+ def issuer_name(self):
+ return self._rpdb.find_keyname_by_id(self._issuer_id)[0]
+
+
+class IncomingObject(BaseObject):
+
+ _depth = None
+ _is_ca = False
+ _disposition = None
+
+ @property
+ def para_obj(self):
+ if getattr(self, "_para_id", None) is None:
+ self._rpdb.cur.execute("SELECT replacement FROM incoming WHERE id = ?", (self.rowid,))
+ self._para_id = self._rpdb.cur.fetchone()[0]
+ return self._rpdb.find_outgoing_by_id(self._para_id)
+
+ @para_obj.setter
+ def para_obj(self, value):
+ if value is None:
+ self._rpdb.cur.execute("DELETE FROM outgoing WHERE id IN (SELECT replacement FROM incoming WHERE id = ?)",
+ (self.rowid,))
+ try:
+ del self._para_id
+ except AttributeError:
+ pass
+ else:
+ assert isinstance(value.rowid, int)
+ self._rpdb.cur.execute("UPDATE incoming SET replacement = ? WHERE id = ?", (value.rowid, self.rowid))
+ self._para_id = value.rowid
+
+ @property
+ def disposition(self):
+ if self._disposition is None:
+ self._disposition = self._rpdb.cur.execute("SELECT disposition FROM incoming "
+ "WHERE id = ?", (self.rowid,)).fetchone()[0]
+ return self._disposition
+
+ @disposition.setter
+ def disposition(self, value):
+ self._rpdb.cur.execute("UPDATE incoming SET disposition = ? WHERE id = ?", (value, self.rowid))
+ self._disposition = value
+
+ @classmethod
+ def fromFile(cls, fn):
+ return cls._fn2map[os.path.splitext(fn)[1][1:]](DER_file = fn)
+
+ @classmethod
+ def create(cls, rpdb, rowid, fn2, der, uri, subject_id, issuer_id, depth = None, is_ca = False):
+ assert der is not None
+ self = super(IncomingObject, cls).create(rpdb, rowid, fn2, der, uri, subject_id, issuer_id)
+ self._depth = depth
+ self._is_ca = is_ca
+ return self
+
+ @property
+ def depth(self):
+ return self._depth
+
+ @property
+ def is_ca(self):
+ return self._is_ca
+
+ @property
+ def issuer(self):
+ if self._issuer_id is None or self._issuer_id == self._subject_id:
+ return None
+ return self._rpdb.find_incoming_by_id(self._issuer_id)
+
+
+class OutgoingObject(BaseObject):
+
+ @property
+ def orig_obj(self):
+ if getattr(self, "_orig_id", None) is None:
+ self._rpdb.cur.execute("SELECT id FROM incoming WHERE replacement = ?", (self.rowid,))
+ r = self._rpdb.cur.fetchone()
+ self._orig_id = None if r is None else r[0]
+ return self._rpdb.find_incoming_by_id(self._orig_id)
+
+
+class BaseX509(rpki.x509.X509):
+
+ @property
+ def resources(self):
+ r = self.get_3779resources()
+ r.valid_until = None
+ return r
+
+ def find_children(self, fn2 = None):
+ return self._rpdb._find_results(fn2, "WHERE issuer = ?", [self.subject_id])
+
+
+class BaseCRL(rpki.x509.CRL):
+
+ @property
+ def resources(self):
+ return None
+
+
+class CommonCMS(object):
+
+ @property
+ def resources(self):
+ r = rpki.x509.X509(POW = self.get_POW().certs()[0]).get_3779resources()
+ r.valid_until = None
+ return r
+
+
+class BaseSignedManifest (rpki.x509.SignedManifest, CommonCMS): pass
+class BaseROA (rpki.x509.ROA, CommonCMS): pass
+class BaseGhostbuster (rpki.x509.Ghostbuster, CommonCMS): pass
+
+class IncomingX509 (BaseX509, IncomingObject): pass
+class IncomingCRL (BaseCRL, IncomingObject): pass
+class IncomingSignedManifest (BaseSignedManifest, IncomingObject): pass
+class IncomingROA (BaseROA, IncomingObject): pass
+class IncomingGhostbuster (BaseGhostbuster, IncomingObject): pass
+
+class OutgoingX509 (BaseX509, OutgoingObject): pass
+class OutgoingCRL (BaseCRL, OutgoingObject): pass
+class OutgoingSignedManifest (BaseSignedManifest, OutgoingObject): pass
+class OutgoingROA (BaseROA, OutgoingObject): pass
+class OutgoingGhostbuster (BaseGhostbuster, OutgoingObject): pass
+
+IncomingObject.setfn2map(cer = IncomingX509,
+ crl = IncomingCRL,
+ mft = IncomingSignedManifest,
+ roa = IncomingROA,
+ gbr = IncomingGhostbuster)
+
+OutgoingObject.setfn2map(cer = OutgoingX509,
+ crl = OutgoingCRL,
+ mft = OutgoingSignedManifest,
+ roa = OutgoingROA,
+ gbr = OutgoingGhostbuster)
+
+
+class RPDB(object):
+ """
+ Relying party database.
+ """
+
+ def __init__(self, db_name):
+
+ try:
+ os.unlink(db_name)
+ except:
+ pass
+
+ self.db = sqlite3.connect(db_name, detect_types = sqlite3.PARSE_DECLTYPES)
+ self.db.text_factory = str
+ self.cur = self.db.cursor()
+
+ self.incoming_cache = weakref.WeakValueDictionary()
+ self.outgoing_cache = weakref.WeakValueDictionary()
+
+ self.cur.executescript('''
+ PRAGMA foreign_keys = on;
+
+ CREATE TABLE keyname (
+ id INTEGER PRIMARY KEY NOT NULL,
+ name TEXT NOT NULL,
+ keyid BLOB NOT NULL,
+ UNIQUE (name, keyid));
+
+ CREATE TABLE incoming (
+ id INTEGER PRIMARY KEY NOT NULL,
+ der BLOB NOT NULL,
+ fn2 TEXT NOT NULL
+ CHECK (fn2 IN ('cer', 'crl', 'mft', 'roa', 'gbr')),
+ uri TEXT NOT NULL,
+ depth INTEGER,
+ is_ca BOOLEAN NOT NULL DEFAULT 0,
+ disposition TEXT NOT NULL
+ DEFAULT 'keep'
+ CHECK (disposition IN ('keep', 'delete', 'replace')),
+ subject INTEGER
+ REFERENCES keyname(id)
+ ON DELETE RESTRICT
+ ON UPDATE RESTRICT,
+ issuer INTEGER NOT NULL
+ REFERENCES keyname(id)
+ ON DELETE RESTRICT
+ ON UPDATE RESTRICT,
+ replacement INTEGER
+ REFERENCES outgoing(id)
+ ON DELETE SET NULL
+ ON UPDATE SET NULL,
+ UNIQUE (der),
+ UNIQUE (subject, issuer),
+ CHECK ((subject IS NULL) == (fn2 == 'crl')));
+
+ CREATE TABLE outgoing (
+ id INTEGER PRIMARY KEY NOT NULL,
+ der BLOB,
+ key BLOB,
+ fn2 TEXT NOT NULL
+ CHECK (fn2 IN ('cer', 'crl', 'mft', 'roa', 'gbr')),
+ uri TEXT NOT NULL,
+ subject INTEGER
+ REFERENCES keyname(id)
+ ON DELETE RESTRICT
+ ON UPDATE RESTRICT,
+ issuer INTEGER NOT NULL
+ REFERENCES keyname(id)
+ ON DELETE RESTRICT
+ ON UPDATE RESTRICT,
+ UNIQUE (subject, issuer),
+ CHECK ((key IS NULL) == (fn2 == 'crl')),
+ CHECK ((subject IS NULL) == (fn2 == 'crl')));
+
+ CREATE TABLE range (
+ id INTEGER NOT NULL
+ REFERENCES incoming(id)
+ ON DELETE CASCADE
+ ON UPDATE CASCADE,
+ min RangeVal NOT NULL,
+ max RangeVal NOT NULL,
+ UNIQUE (id, min, max));
+
+ ''')
+
+
+ def load(self, rcynic_input, spinner = 100):
+
+ start = rpki.sundial.now()
+ nobj = 0
+
+ for root, dirs, files in os.walk(rcynic_input):
+ for fn in files:
+ fn = os.path.join(root, fn)
+
+ try:
+ obj = IncomingObject.fromFile(fn)
+ except:
+ if spinner:
+ sys.stderr.write("\r")
+ sys.stderr.write("Couldn't read %s, skipping\n" % fn)
+ continue
+
+ if spinner and nobj % spinner == 0:
+ sys.stderr.write("\r%s %d %s..." % ("|\\-/"[(nobj/spinner) & 3], nobj, rpki.sundial.now() - start))
+
+ nobj += 1
+
+ if obj.fn2 == "crl":
+ ski = None
+ aki = buffer(obj.get_AKI())
+ cer = None
+ bag = None
+ issuer = obj.getIssuer()
+ subject = None
+ is_ca = False
+
+ else:
+ if obj.fn2 == "cer":
+ cer = obj
+ else:
+ cer = rpki.x509.X509(POW = obj.get_POW().certs()[0])
+ issuer = cer.getIssuer()
+ subject = cer.getSubject()
+ ski = buffer(cer.get_SKI())
+ aki = cer.get_AKI()
+ if aki is None:
+ assert subject == issuer
+ aki = ski
+ else:
+ aki = buffer(aki)
+ bag = cer.get_3779resources()
+ is_ca = cer.is_CA()
+
+ der = buffer(obj.get_DER())
+ uri = "rsync://" + fn[len(rcynic_input) + 1:]
+
+ self.cur.execute("SELECT id FROM incoming WHERE der = ?", (der,))
+ r = self.cur.fetchone()
+
+ if r is not None:
+ rowid = r[0]
+
+ else:
+ subject_id = None if ski is None else self.find_keyname(subject, ski)
+ issuer_id = self.find_keyname(issuer, aki)
+
+ self.cur.execute("INSERT INTO incoming (der, fn2, subject, issuer, uri, is_ca) "
+ "VALUES (?, ?, ?, ?, ?, ?)",
+ (der, obj.fn2, subject_id, issuer_id, uri, is_ca))
+ rowid = self.cur.lastrowid
+
+ if bag is not None:
+ for rset in (bag.asn, bag.v4, bag.v6):
+ if rset is not None:
+ self.cur.executemany("REPLACE INTO range (id, min, max) VALUES (?, ?, ?)",
+ ((rowid, i.min, i.max) for i in rset))
+
+ if spinner:
+ sys.stderr.write("\r= %d objects in %s.\n" % (nobj, rpki.sundial.now() - start))
+
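+ # Assign depths breadth-first: self-signed objects (the trust anchors) get
+ # depth zero, then each pass marks objects whose issuer was reached on the
+ # previous pass; the 500-pass limit is just a guard against certificate
+ # chains that loop or are absurdly deep.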
+ self.cur.execute("UPDATE incoming SET depth = 0 WHERE subject = issuer")
+
+ for depth in xrange(1, 500):
+
+ self.cur.execute("SELECT COUNT(*) FROM incoming WHERE depth IS NULL")
+ if self.cur.fetchone()[0] == 0:
+ break
+
+ if spinner:
+ sys.stderr.write("\rSetting depth %d..." % depth)
+
+ self.cur.execute("""
+ UPDATE incoming SET depth = ?
+ WHERE depth IS NULL
+ AND issuer IN (SELECT subject FROM incoming WHERE depth = ?)
+ """,
+ (depth, depth - 1))
+
+ else:
+ if spinner:
+ sys.stderr.write("\rSetting depth %d is absurd, giving up, " % depth)
+
+ if spinner:
+ sys.stderr.write("\nCommitting...")
+
+ self.db.commit()
+
+ if spinner:
+ sys.stderr.write("done.\n")
+
+
+ def add_para(self, obj, resources, serial, ltacer, ltasia, ltaaia, ltamft, ltacrl, ltakey):
+
+ assert isinstance(obj, IncomingX509)
+
+ if obj.para_obj is not None:
+ resources &= obj.para_obj.resources
+
+ obj.para_obj = None
+
+ if not resources:
+ return
+
+ pow = obj.get_POW()
+
+ x = rpki.POW.X509()
+
+ x.setVersion( pow.getVersion())
+ x.setSubject( pow.getSubject())
+ x.setNotBefore( pow.getNotBefore())
+ x.setNotAfter( pow.getNotAfter())
+ x.setPublicKey( pow.getPublicKey())
+ x.setSKI( pow.getSKI())
+ x.setBasicConstraints( pow.getBasicConstraints())
+ x.setKeyUsage( pow.getKeyUsage())
+ x.setCertificatePolicies( pow.getCertificatePolicies())
+ x.setSIA( *pow.getSIA())
+
+ x.setIssuer( ltacer.get_POW().getIssuer())
+ x.setAKI( ltacer.get_POW().getSKI())
+ x.setAIA( (ltaaia,))
+ x.setCRLDP( (ltacrl,))
+
+ x.setSerial( serial())
+ x.setRFC3779(
+ asn = ((r.min, r.max) for r in resources.asn),
+ ipv4 = ((r.min, r.max) for r in resources.v4),
+ ipv6 = ((r.min, r.max) for r in resources.v6))
+
+ x.sign(ltakey.get_POW(), rpki.POW.SHA256_DIGEST)
+ cer = OutgoingX509(POW = x)
+
+ ski = buffer(cer.get_SKI())
+ aki = buffer(cer.get_AKI())
+ bag = cer.get_3779resources()
+ issuer = cer.getIssuer()
+ subject = cer.getSubject()
+ der = buffer(cer.get_DER())
+ uri = ltasia + cer.gSKI() + ".cer"
+
+ # This will want to change when we start generating replacement keys for everything.
+ # This should really be a keypair, not just a public key, same comment.
+ #
+ key = buffer(pow.getPublicKey().derWritePublic())
+
+ subject_id = self.find_keyname(subject, ski)
+ issuer_id = self.find_keyname(issuer, aki)
+
+ self.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri, key) "
+ "VALUES (?, 'cer', ?, ?, ?, ?)",
+ (der, subject_id, issuer_id, uri, key))
+ rowid = self.cur.lastrowid
+ self.cur.execute("UPDATE incoming SET replacement = ? WHERE id = ?",
+ (rowid, obj.rowid))
+
+ # Fix up _orig_id and _para_id here? Maybe later.
+
+ #self.db.commit()
+
+
+ def dump_paras(self, rcynic_output):
+ shutil.rmtree(rcynic_output, ignore_errors = True)
+ rsync = "rsync://"
+ for der, uri in self.cur.execute("SELECT der, uri FROM outgoing"):
+ assert uri.startswith(rsync)
+ fn = os.path.join(rcynic_output, uri[len(rsync):])
+ dn = os.path.dirname(fn)
+ if not os.path.exists(dn):
+ os.makedirs(dn)
+ with open(fn, "wb") as f:
+ #print ">> Writing", f.name
+ f.write(der)
+
+
+ def find_keyname(self, name, keyid):
+ keys = (name, buffer(keyid))
+ self.cur.execute("SELECT id FROM keyname WHERE name = ? AND keyid = ?", keys)
+ result = self.cur.fetchone()
+ if result is None:
+ self.cur.execute("INSERT INTO keyname (name, keyid) VALUES (?, ?)", keys)
+ result = self.cur.lastrowid
+ else:
+ result = result[0]
+ return result
+
+
+ def find_keyname_by_id(self, rowid):
+ self.cur.execute("SELECT name, keyid FROM keyname WHERE id = ?", (rowid,))
+ result = self.cur.fetchone()
+ return (None, None) if result is None else result
+
+
+ def find_incoming_by_id(self, rowid):
+ if rowid is None:
+ return None
+ if rowid in self.incoming_cache:
+ return self.incoming_cache[rowid]
+ r = self._find_results(None, "WHERE id = ?", [rowid])
+ assert len(r) < 2
+ return r[0] if r else None
+
+
+ def find_outgoing_by_id(self, rowid):
+ if rowid is None:
+ return None
+ if rowid in self.outgoing_cache:
+ return self.outgoing_cache[rowid]
+ self.cur.execute("SELECT fn2, der, key, uri, subject, issuer FROM outgoing WHERE id = ?", (rowid,))
+ r = self.cur.fetchone()
+ if r is None:
+ return None
+ fn2, der, key, uri, subject_id, issuer_id = r
+ obj = OutgoingObject.create(rpdb = self, rowid = rowid, fn2 = fn2, der = der, uri = uri,
+ subject_id = subject_id, issuer_id = issuer_id)
+ self.outgoing_cache[rowid] = obj
+ return obj
+
+
+ def find_by_ski_or_uri(self, ski, uri):
+ if not ski and not uri:
+ return []
+ j = ""
+ w = []
+ a = []
+ if ski:
+ j = "JOIN keyname ON incoming.subject = keyname.id"
+ w.append("keyname.keyid = ?")
+ a.append(buffer(ski))
+ if uri:
+ w.append("incoming.uri = ?")
+ a.append(uri)
+ return self._find_results(None, "%s WHERE %s" % (j, " AND ".join(w)), a)
+
+
+ # It's easiest to understand overlap conditions by understanding
+ # non-overlap, then inverting and applying De Morgan's law.
+ # Ranges A and B do not overlap if: A.min > B.max or B.min > A.max;
+ # therefore A and B do overlap if: A.min <= B.max and B.min <= A.max.
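+ # For example, [10, 20] and [15, 30] overlap (10 <= 30 and 15 <= 20),
+ # while [10, 20] and [25, 30] do not (25 > 20).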
+
+ def find_by_range(self, range_min, range_max = None, fn2 = None):
+ if range_max is None:
+ range_max = range_min
+ if isinstance(range_min, (str, unicode)):
+ range_min = long(range_min) if range_min.isdigit() else rpki.POW.IPAddress(range_min)
+ if isinstance(range_max, (str, unicode)):
+ range_max = long(range_max) if range_max.isdigit() else rpki.POW.IPAddress(range_max)
+ assert isinstance(range_min, (int, long, rpki.POW.IPAddress))
+ assert isinstance(range_max, (int, long, rpki.POW.IPAddress))
+ return self._find_results(fn2,
+ "JOIN range ON incoming.id = range.id "
+ "WHERE ? <= range.max AND ? >= range.min",
+ [range_min, range_max])
+
+
+ def find_by_resource_bag(self, bag, fn2 = None):
+ assert bag.asn or bag.v4 or bag.v6
+ qset = []
+ aset = []
+ for rset in (bag.asn, bag.v4, bag.v6):
+ if rset:
+ for r in rset:
+ qset.append("(? <= max AND ? >= min)")
+ aset.append(r.min)
+ aset.append(r.max)
+ return self._find_results(
+ fn2,
+ """
+ JOIN range ON incoming.id = range.id
+ WHERE
+ """ + (" OR ".join(qset)),
+ aset)
+
+
+ def _find_results(self, fn2, query, args = None):
+ if args is None:
+ args = []
+ if fn2 is not None:
+ query += " AND fn2 = ?"
+ args.append(fn2)
+ results = []
+ for rowid, fn2, der, uri, subject_id, issuer_id, depth, is_ca in self.cur.execute(
+ '''
+ SELECT DISTINCT
+ incoming.id, incoming.fn2,
+ incoming.der, incoming.uri,
+ incoming.subject, incoming.issuer,
+ incoming.depth, incoming.is_ca
+ FROM incoming
+ ''' + query, args):
+ if rowid in self.incoming_cache:
+ obj = self.incoming_cache[rowid]
+ assert obj.rowid == rowid
+ else:
+ obj = IncomingObject.create(rpdb = self, rowid = rowid, fn2 = fn2, der = der, uri = uri,
+ subject_id = subject_id, issuer_id = issuer_id, depth = depth,
+ is_ca = is_ca)
+ self.incoming_cache[rowid] = obj
+ results.append(obj)
+ return results
+
+
+ def commit(self):
+ self.db.commit()
+
+
+ def close(self):
+ self.commit()
+ self.cur.close()
+ self.db.close()
+
+if __name__ == "__main__":
+ #profile = None
+ profile = "rcynic-lta.prof"
+ if profile:
+ import cProfile
+ prof = cProfile.Profile()
+ try:
+ prof.runcall(main)
+ finally:
+ prof.dump_stats(profile)
+ sys.stderr.write("Dumped profile data to %s\n" % profile)
+ else:
+ main()
+
diff --git a/potpourri/rcynic-lta.yaml b/potpourri/rcynic-lta.yaml
new file mode 100644
index 00000000..ab17a56c
--- /dev/null
+++ b/potpourri/rcynic-lta.yaml
@@ -0,0 +1,69 @@
+db-name:
+ /u/sra/rpki/subvert-rpki.hactrn.net/trunk/scripts/rcynic-lta.db
+
+rcynic-input:
+ /u/sra/rpki/subvert-rpki.hactrn.net/trunk/rcynic/rcynic-data/authenticated
+
+rcynic-output:
+ /u/sra/rpki/subvert-rpki.hactrn.net/trunk/rcynic/rcynic-data/lta-unauthenticated
+
+tal-directory:
+ /u/sra/rpki/subvert-rpki.hactrn.net/trunk/rcynic/sample-trust-anchors
+
+keyfile:
+ /u/sra/rpki/subvert-rpki.hactrn.net/trunk/scripts/rcynic-lta.key
+
+common:
+
+ - &VCARD-1 |
+ BEGIN:VCARD
+ VERSION:4.0
+ FN:R0
+ ORG:Organizational Entity
+ ADR;TYPE=WORK:;;42 Twisty Passage;Deep Cavern;WA;98666;U.S.A.
+ TEL;TYPE=VOICE,TEXT,WORK;VALUE=uri:tel:+1-666-555-1212
+ TEL;TYPE=FAX,WORK;VALUE=uri:tel:+1-666-555-1213
+ EMAIL:human@example.com
+ END:VCARD
+ - &GBR-1 { gbr: *VCARD-1 }
+
+ - &VCARD-2 |
+ BEGIN:VCARD
+ VERSION:4.0
+ ORG:Epilogue Technology Corporation
+ EMAIL:sra@epilogue.com
+ END:VCARD
+ - &GBR-2 { gbr: *VCARD-2 }
+
+ - &ROA-666 { roa: { asn: 666 } }
+
+constraints:
+
+ # Need something for a drop test, sorry Randy
+ - prefix: 147.28.224.0/19
+ drop: roas
+
+ - prefix: 10.0.0.0/8
+ add: [ { roa: { asn: 666, maxlen: 16 }}, *GBR-1 ]
+
+ - prefix: 192.168.0.0/16
+ drop: all
+
+ - asn: 666
+ add: [ *GBR-1 ]
+
+ - prefix: 128.224.0.0/16
+ drop: all
+ add: [ *GBR-2 ]
+
+ - prefix: 128.224.1.0/24
+ add: [ *GBR-2, *ROA-666 ]
+
+ - prefix: 128.224.2.0/24
+ add: [ *GBR-2, *ROA-666 ]
+
+ - prefix: 149.20.0.0/16
+ add: [ *ROA-666 ]
+
+ - prefix: 2001:4f8:3:d::/64
+ add: [ *ROA-666 ]
diff --git a/potpourri/repo0-testbed-daily b/potpourri/repo0-testbed-daily
new file mode 100755
index 00000000..576464c4
--- /dev/null
+++ b/potpourri/repo0-testbed-daily
@@ -0,0 +1,19 @@
+#!/bin/sh -
+# $Id: daily 602 2013-06-02 18:00:25Z sra $
+
+home=/home/sra/rpki.testbed
+
+exec >>$home/logs/daily.log 2>&1
+
+set -x
+
+date
+
+cd $home
+
+/usr/local/sbin/rpkic update_bpki
+
+/usr/local/bin/svn update
+/usr/local/bin/svn add --force .
+/usr/local/bin/svn ci --message 'Daily auto update'
+/usr/local/bin/svn update
diff --git a/potpourri/repo0-testbed-monthly b/potpourri/repo0-testbed-monthly
new file mode 100755
index 00000000..b7ada110
--- /dev/null
+++ b/potpourri/repo0-testbed-monthly
@@ -0,0 +1,22 @@
+#!/bin/sh -
+# $Id: monthly 602 2013-06-02 18:00:25Z sra $
+
+home=/home/sra/rpki.testbed
+
+exec >>$home/logs/monthly.log 2>&1
+
+set -x
+
+date
+
+cd $home
+
+for identity in iana afrinic apnic arin lacnic legacy ripe
+do
+ /usr/local/sbin/rpkic -i $identity renew_all_children
+done
+
+/usr/local/bin/svn update
+/usr/local/bin/svn add --force .
+/usr/local/bin/svn ci --message 'Monthly auto update'
+/usr/local/bin/svn update
diff --git a/potpourri/repo0-testbed-weekly b/potpourri/repo0-testbed-weekly
new file mode 100755
index 00000000..6f1f8ead
--- /dev/null
+++ b/potpourri/repo0-testbed-weekly
@@ -0,0 +1,96 @@
+#!/bin/sh -
+# $Id: weekly 756 2013-11-21 22:54:28Z sra $
+#
+# Run weekly periodic IR back-end tasks.
+
+home=/home/sra/rpki.testbed
+
+top=/home/sra/subvert-rpki.hactrn.net/trunk
+
+exec >>$home/logs/weekly.log 2>&1
+set -x
+date
+
+export OPENSSL_CONF=/dev/null
+for openssl in $top/openssl/openssl/apps/openssl /usr/local/bin/openssl
+do
+ test -x $openssl && break
+done
+
+## Download all input files. See the fetch script for the current
+## list of files, but for a long time now it's been:
+##
+## http://www.iana.org/assignments/as-numbers/as-numbers.xml
+## http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml
+## http://www.iana.org/assignments/ipv6-unicast-address-assignments/ipv6-unicast-address-assignments.xml
+## ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.aut-num.gz
+## ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz
+## ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz
+## ftp://ftp.ripe.net/pub/stats/ripencc/membership/alloclist.txt
+## ftp://ftp.apnic.net/public/stats/apnic/delegated-apnic-extended-latest
+##
+## Along with an ARIN bulkwhois dump which we get under a research NDA
+## and retrieve via a mechanism that I'm not going to describe here.
+
+/bin/sh -x $home/scripts/fetch
+
+## Process ARIN data first -- we need erx.csv, which comes from ARIN.
+
+cd $home/arin
+/usr/local/bin/unzip -p arin_db.zip arin_db.xml |
+/usr/local/bin/python $top/scripts/arin-to-csv.py
+/usr/local/bin/python $top/scripts/translate-handles.py asns.csv prefixes.csv
+
+## Process IANA data, now that we have erx.csv.
+
+cd $home/iana
+/usr/local/bin/python $top/scripts/iana-to-csv.py
+
+## Process APNIC data.
+
+cd $home/apnic
+/usr/local/bin/python $top/scripts/apnic-to-csv.py
+/usr/local/bin/python $top/scripts/translate-handles.py asns.csv prefixes.csv
+
+## Process RIPE data. RIPE's database is a horror, the less said
+## about it the better.
+##
+## Somewhere along the line we seem to have stopped even trying to
+## generate the ASN database for RIPE, not sure why. I've restored it
+## here, guess we'll find out if there was a reason why we disabled it.
+
+cd $home/ripe
+/usr/local/bin/python $top/scripts/ripe-asns-to-csv.py
+/usr/bin/awk -f $top/scripts/ripe-prefixes-to-csv.awk alloclist.txt |
+/bin/cat extra-prefixes.csv - |
+/usr/bin/sort -uo prefixes.csv
+/usr/local/bin/python $top/scripts/translate-handles.py asns.csv prefixes.csv
+/usr/bin/sort -uo prefixes.csv prefixes.csv
+
+## Not yet doing anything for AfriNIC, LACNIC, or Legacy.
+
+## Generate root certificate. This is a lot simpler now that we're pretending to be the One True Root.
+
+cd $home/root
+$openssl req -new -x509 -days 90 -set_serial $(/bin/date -u +%s) -config root.conf -out root.cer -key root.key -outform DER
+/bin/cp -fp root.cer root.cer.dup &&
+/bin/mv -f root.cer.dup /home/pubd/publication/root.cer
+
+## Whack all the files into subversion.
+
+cd $home
+/usr/local/bin/svn update
+/usr/local/bin/svn add --force .
+/usr/local/bin/svn ci --message 'Weekly auto update'
+/usr/local/bin/svn update
+
+## Feed all the new data into the IRDB.
+
+for entity in iana afrinic apnic arin lacnic legacy ripe
+do
+ for resource in asns prefixes
+ do
+ /bin/test -r $entity/$resource.csv &&
+ /usr/local/sbin/rpkic --identity $entity load_$resource $entity/$resource.csv
+ done
+done
diff --git a/potpourri/ripe-asns-to-csv.py b/potpourri/ripe-asns-to-csv.py
new file mode 100644
index 00000000..50251ce8
--- /dev/null
+++ b/potpourri/ripe-asns-to-csv.py
@@ -0,0 +1,108 @@
+# $Id$
+#
+# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Parse a WHOIS research dump and write out (just) the RPKI-relevant
+fields in myrpki-format CSV syntax.
+
+NB: The input data for this script is publicly available via FTP, but
+you'll have to fetch the data from RIPE yourself, and be sure to see
+the terms and conditions referenced by the data file header comments.
+"""
+
+import gzip
+from rpki.csv_utils import csv_writer
+
+class Handle(dict):
+
+ want_tags = ()
+
+ debug = False
+
+ def set(self, tag, val):
+ if tag in self.want_tags:
+ self[tag] = "".join(val.split(" "))
+
+ def check(self):
+ for tag in self.want_tags:
+ if not tag in self:
+ return False
+ if self.debug:
+ self.log()
+ return True
+
+ def __repr__(self):
+ return "<%s %s>" % (self.__class__.__name__,
+ " ".join("%s:%s" % (tag, self.get(tag, "?"))
+ for tag in self.want_tags))
+
+ def log(self):
+ print repr(self)
+
+ def finish(self, ctx):
+ self.check()
+
+class aut_num(Handle):
+ want_tags = ("aut-num", "mnt-by", "as-name")
+
+ def set(self, tag, val):
+ if tag == "aut-num" and val.startswith("AS"):
+ val = val[2:]
+ Handle.set(self, tag, val)
+
+ def finish(self, ctx):
+ if self.check():
+ ctx.asns.writerow((self["mnt-by"], self["aut-num"]))
+
+class main(object):
+
+ types = dict((x.want_tags[0], x) for x in (aut_num,))
+
+
+ def finish_statement(self, done):
+ if self.statement:
+ tag, sep, val = self.statement.partition(":")
+ assert sep, "Couldn't find separator in %r" % self.statement
+ tag = tag.strip().lower()
+ val = val.strip().upper()
+ if self.cur is None:
+ self.cur = self.types[tag]() if tag in self.types else False
+ if self.cur is not False:
+ self.cur.set(tag, val)
+ if done and self.cur:
+ self.cur.finish(self)
+ self.cur = None
+
+ filenames = ("ripe.db.aut-num.gz",)
+
+ def __init__(self):
+ self.asns = csv_writer("asns.csv")
+ for fn in self.filenames:
+ f = gzip.open(fn)
+ self.statement = ""
+ self.cur = None
+ for line in f:
+ line = line.expandtabs().partition("#")[0].rstrip("\n")
+ if line and not line[0].isalpha():
+ self.statement += line[1:] if line[0] == "+" else line
+ else:
+ self.finish_statement(not line)
+ self.statement = line
+ self.finish_statement(True)
+ f.close()
+ self.asns.close()
+
+main()
diff --git a/potpourri/ripe-prefixes-to-csv.awk b/potpourri/ripe-prefixes-to-csv.awk
new file mode 100644
index 00000000..37327484
--- /dev/null
+++ b/potpourri/ripe-prefixes-to-csv.awk
@@ -0,0 +1,37 @@
+#!/usr/bin/awk -f
+# $Id$
+
+# ftp -pa ftp://ftp.ripe.net/pub/stats/ripencc/membership/alloclist.txt
+
+function done() {
+ for (i = 1; i <= n_allocs; i++)
+ print handle "\t" alloc[i];
+ n_allocs = 0;
+}
+
+/^[a-z]/ {
+ done();
+ handle = $0;
+ nr = NR;
+}
+
+NR == nr + 1 {
+ name = $0;
+}
+
+NR > nr + 2 && NF > 1 && $2 !~ /:/ {
+ split($2, a, "/");
+ len = a[2];
+ split(a[1], a, /[.]/);
+ for (i = length(a); i < 4; i++)
+ a[i+1] = 0;
+ alloc[++n_allocs] = sprintf("%d.%d.%d.%d/%d", a[1], a[2], a[3], a[4], len);
+}
+
+NR > nr + 2 && NF > 1 && $2 ~ /:/ {
+ alloc[++n_allocs] = $2;
+}
+
+END {
+ done();
+}
diff --git a/potpourri/ripe-to-csv.awk b/potpourri/ripe-to-csv.awk
new file mode 100644
index 00000000..5325574f
--- /dev/null
+++ b/potpourri/ripe-to-csv.awk
@@ -0,0 +1,124 @@
+#!/usr/bin/awk -f
+
+# Parse a WHOIS research dump and write out (just) the RPKI-relevant
+# fields in myrpki-format CSV syntax.
+#
+# Unfortunately, unlike the ARIN and APNIC databases, the RIPE database
+# doesn't really have any useful concept of an organizational handle.
+# More precisely, while it has handles out the wazoo, none of them are
+# useful as a reliable grouping mechanism for tracking which set of
+# resources are held by a particular organization. So, instead of being
+# able to track all of an organization's resources with a single handle
+# as we can in the ARIN and APNIC databases, the best we can do with the
+# RIPE database is to track individual resources, each with its own
+# resource handle. Well, for prefixes -- ASN entries behave more like
+# in the ARIN and APNIC databases.
+#
+# This is an AWK script rather than a Python script because it is a
+# fairly simple stream parser that has to process a ridiculous amount
+# of text. AWK turns out to be significantly faster for this.
+#
+# NB: The input data for this script is publicly available via FTP, but
+# you'll have to fetch the data from RIPE yourself, and be sure to see
+# the terms and conditions referenced by the data file header comments.
+#
+# $Id$
+#
+# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+# On input, ":" is the most useful delimiter
+# On output, we want tab-delimited text.
+BEGIN {
+ FS = "[ \t]*:";
+ OFS = "\t";
+}
+
+# Clean up trailing whitespace.
+{
+ sub(/[ \t]+$/, "");
+}
+
+# Continuation line: strip comment, if any, then append value, if any,
+# to what we had from previous line(s).
+/^[^A-Z]/ {
+ sub(/[ \t]*#.*$/, "");
+ if (NF)
+ val = val $0;
+ next;
+}
+
+# Anything other than line continuation terminates the previous line,
+# so if we were working on a line, we're done with it now, process it.
+key {
+ do_line();
+}
+
+# Non-empty line and we have no tag, this must be start of a new block.
+NF && !tag {
+ tag = $1;
+}
+
+# One of the tags we care about, clean up and save the data.
+/^(AS-NAME|AUT-NUM|INET6NUM|INETNUM|MNT-BY|NETNAME|STATUS):/ {
+ key = $1;
+ sub(/^[^ \t]+:/, "");
+ sub(/[ \t]*#.*$/, "");
+ val = $0;
+}
+
+# Blank line and we have something, process it.
+!NF && tag {
+ do_block();
+}
+
+# End of file, process final data, if any.
+END {
+ do_line();
+ do_block();
+}
+
+# Handle one line, after undoing icky RPSL line continuation.
+function do_line() {
+ gsub(/[ \t]/, "", val);
+ if (key && val)
+ tags[key] = val;
+ key = "";
+ val = "";
+}
+
+# Dispatch to handle known block types, then clean up so we can start
+# a new block.
+function do_block() {
+ if (tag == "INETNUM" || tag == "INET6NUM")
+ do_prefix();
+ else if (tag == "AUT-NUM")
+ do_asn();
+ delete tags;
+ tag = "";
+}
+
+# Handle an AUT-NUM block: extract the ASN, use MNT-BY as the handle.
+function do_asn() {
+ sub(/^AS/, "", tags[tag]);
+ if (tags["MNT-BY"] && tags[tag])
+ print tags["MNT-BY"], tags[tag] >"asns.csv";
+}
+
+# Handle an INETNUM or INET6NUM block: check for the status values we
+# care about, use NETNAME as the handle.
+function do_prefix() {
+ if (tags["STATUS"] ~ /^ASSIGNED(P[AI])$/ && tags["NETNAME"] && tags[tag])
+ print tags["NETNAME"], tags[tag] >"prefixes.csv";
+}
diff --git a/potpourri/ripe-to-csv.py b/potpourri/ripe-to-csv.py
new file mode 100644
index 00000000..b864345b
--- /dev/null
+++ b/potpourri/ripe-to-csv.py
@@ -0,0 +1,138 @@
+# $Id$
+#
+# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Parse a WHOIS research dump and write out (just) the RPKI-relevant
+fields in myrpki-format CSV syntax.
+
+Unfortunately, unlike the ARIN and APNIC databases, the RIPE database
+doesn't really have any useful concept of an organizational handle.
+More precisely, while it has handles out the wazoo, none of them are
+useful as a reliable grouping mechanism for tracking which set of
+resources are held by a particular organization. So, instead of being
+able to track all of an organization's resources with a single handle
+as we can in the ARIN and APNIC databases, the best we can do with the
+RIPE database is to track individual resources, each with its own
+resource handle. Well, for prefixes -- ASN entries behave more like
+in the ARIN and APNIC databases.
+
+Feh.
+
+NB: The input data for this script is publicly available via FTP, but
+you'll have to fetch the data from RIPE yourself, and be sure to see
+the terms and conditions referenced by the data file header comments.
+"""
+
+import gzip
+from rpki.csv_utils import csv_writer
+
+class Handle(dict):
+
+ want_tags = ()
+
+ want_status = ("ASSIGNED", "ASSIGNEDPA", "ASSIGNEDPI")
+
+ debug = False
+
+ def set(self, tag, val):
+ if tag in self.want_tags:
+ self[tag] = "".join(val.split(" "))
+
+ def check(self):
+ for tag in self.want_tags:
+ if not tag in self:
+ return False
+ if self.debug:
+ self.log()
+ return True
+
+ def __repr__(self):
+ return "<%s %s>" % (self.__class__.__name__,
+ " ".join("%s:%s" % (tag, self.get(tag, "?"))
+ for tag in self.want_tags))
+
+ def log(self):
+ print repr(self)
+
+ def finish(self, ctx):
+ self.check()
+
+class aut_num(Handle):
+ want_tags = ("aut-num", "mnt-by") # "as-name"
+
+ def set(self, tag, val):
+ if tag == "aut-num" and val.startswith("AS"):
+ val = val[2:]
+ Handle.set(self, tag, val)
+
+ def finish(self, ctx):
+ if self.check():
+ ctx.asns.writerow((self["mnt-by"], self["aut-num"]))
+
+class inetnum(Handle):
+ want_tags = ("inetnum", "netname", "status") # "mnt-by"
+
+ def finish(self, ctx):
+ if self.check() and self["status"] in self.want_status:
+ ctx.prefixes.writerow((self["netname"], self["inetnum"]))
+
+class inet6num(Handle):
+ want_tags = ("inet6num", "netname", "status") # "mnt-by"
+
+ def finish(self, ctx):
+ if self.check() and self["status"] in self.want_status:
+ ctx.prefixes.writerow((self["netname"], self["inet6num"]))
+
+class main(object):
+
+ types = dict((x.want_tags[0], x) for x in (aut_num, inetnum, inet6num))
+
+ def finish_statement(self, done):
+ if self.statement:
+ tag, sep, val = self.statement.partition(":")
+ assert sep, "Couldn't find separator in %r" % self.statement
+ tag = tag.strip().lower()
+ val = val.strip().upper()
+ if self.cur is None:
+ self.cur = self.types[tag]() if tag in self.types else False
+ if self.cur is not False:
+ self.cur.set(tag, val)
+ if done and self.cur:
+ self.cur.finish(self)
+ self.cur = None
+
+ filenames = ("ripe.db.aut-num.gz", "ripe.db.inet6num.gz", "ripe.db.inetnum.gz")
+
+ def __init__(self):
+ self.asns = csv_writer("asns.csv")
+ self.prefixes = csv_writer("prefixes.csv")
+ for fn in self.filenames:
+ f = gzip.open(fn)
+ self.statement = ""
+ self.cur = None
+ for line in f:
+ line = line.expandtabs().partition("#")[0].rstrip("\n")
+ if line and not line[0].isalpha():
+ self.statement += line[1:] if line[0] == "+" else line
+ else:
+ self.finish_statement(not line)
+ self.statement = line
+ self.finish_statement(True)
+ f.close()
+ self.asns.close()
+ self.prefixes.close()
+
+main()
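
For orientation, here is a sketch (using a made-up aut-num block, not data from a real RIPE dump) of how the statement-folding loop above feeds the Handle classes:

    # Hypothetical input, after comment stripping and continuation folding:
    #
    #   aut-num:  AS64511
    #   mnt-by:   EXAMPLE-MNT
    #   <blank line>
    #
    # Each "tag: value" statement becomes one aut_num.set() call (the "AS"
    # prefix is stripped there); the blank line calls finish_statement(True),
    # whose finish() writes the row ("EXAMPLE-MNT", "64511") to asns.csv.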
diff --git a/potpourri/roa-to-irr.py b/potpourri/roa-to-irr.py
new file mode 100644
index 00000000..01b2aac8
--- /dev/null
+++ b/potpourri/roa-to-irr.py
@@ -0,0 +1,159 @@
+# $Id$
+#
+# Copyright (C) 2014 Dragon Research Labs ("DRL")
+# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notices and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL
+# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR
+# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
+# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA
+# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Generate IRR route and route6 objects from ROAs.
+"""
+
+import os
+import socket
+import sys
+import argparse
+import errno
+import time
+import rpki.x509
+
+args = None
+
+class route(object):
+ """
+ Interesting parts of a route object.
+ """
+
+ def __init__(self, label, uri, asnum, date, prefix, prefixlen, max_prefixlen):
+ self.label = label
+ self.uri = uri
+ self.asn = asnum
+ self.date = date
+ self.prefix = prefix
+ self.prefixlen = prefixlen
+ self.max_prefixlen = self.prefixlen if max_prefixlen is None else max_prefixlen
+
+ def __cmp__(self, other):
+ result = cmp(self.asn, other.asn)
+ if result == 0:
+ result = cmp(self.prefix, other.prefix)
+ if result == 0:
+ result = cmp(self.prefixlen, other.prefixlen)
+ if result == 0:
+ result = cmp(self.max_prefixlen, other.max_prefixlen)
+ if result == 0:
+ result = cmp(self.date, other.date)
+ return result
+
+ def __str__(self):
+ lines = (
+ "%-14s%s/%s" % (self.label, self.prefix, self.prefixlen),
+ "descr: %s/%s-%s" % (self.prefix, self.prefixlen, self.max_prefixlen),
+ "origin: AS%d" % self.asn,
+ "notify: %s" % args.notify,
+ "mnt-by: %s" % args.mnt_by,
+ "changed: %s %s" % (args.changed_by, self.date),
+ "source: %s" % args.source,
+ "override: %s" % args.password if args.password is not None else None,
+ "")
+ return "\n".join(line for line in lines if line is not None)
+
+ def write(self, output_directory):
+ name = "%s-%s-%s-AS%d-%s" % (self.prefix, self.prefixlen, self.max_prefixlen, self.asn, self.date)
+ with open(os.path.join(output_directory, name), "w") as f:
+ f.write(str(self))
+
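
As a concrete illustration of the route class above (hypothetical ROA: AS64511 originating 192.0.2.0/24 with maxLength 24, default command-line values, no override password; the email address and date are invented), __str__ would produce roughly:

    # route:        192.0.2.0/24
    # descr:        192.0.2.0/24-24
    # origin:       AS64511
    # notify:       someone@host.example
    # mnt-by:       MAINT-RPKI
    # changed:      someone@host.example 20140101
    # source:       RPKI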
+
+class route_list(list):
+ """
+ A list of route objects.
+ """
+
+ def __init__(self, rcynic_dir):
+ for root, dirs, files in os.walk(rcynic_dir):
+ for f in files:
+ if f.endswith(".roa"):
+ path = os.path.join(root, f)
+ uri = "rsync://" + path[len(rcynic_dir):].lstrip("/")
+ roa = rpki.x509.ROA(DER_file = path)
+ roa.extract()
+ assert roa.get_POW().getVersion() == 0, "ROA version is %d, expected 0" % roa.get_POW().getVersion()
+ asnum = roa.get_POW().getASID()
+ notBefore = roa.get_POW().certs()[0].getNotBefore().strftime("%Y%m%d")
+ v4, v6 = roa.get_POW().getPrefixes()
+ if v4 is not None:
+ for prefix, prefixlen, max_prefixlen in v4:
+ self.append(route("route:", uri, asnum, notBefore, prefix, prefixlen, max_prefixlen))
+ if v6 is not None:
+ for prefix, prefixlen, max_prefixlen in v6:
+ self.append(route("route6:", uri, asnum, notBefore, prefix, prefixlen, max_prefixlen))
+ self.sort()
+ for i in xrange(len(self) - 2, -1, -1):
+ if self[i] == self[i + 1]:
+ del self[i + 1]
+
+def email_header(f):
+ if args.email:
+ f.write("\n".join((
+ "From %s" % args.email_from,
+ "Date: %s" % time.strftime("%d %b %Y %T %z"),
+ "From: %s" % args.email_from,
+ "Subject: Fake email header to make irr_rpsl_submit happy",
+ "Message-Id: <%s.%s@%s>" % (os.getpid(), time.time(), socket.gethostname()),
+ "", "")))
+
+def main():
+
+ global args
+ whoami = "%s@%s" % (os.getlogin(), socket.gethostname())
+
+ parser = argparse.ArgumentParser(description = __doc__)
+ parser.add_argument("-c", "--changed_by", default = whoami,
+ help = "override \"changed:\" value")
+ parser.add_argument("-f", "--from", dest = "email_from", default = whoami,
+ help = "override \"from:\" header when using --email")
+ parser.add_argument("-m", "--mnt_by", default = "MAINT-RPKI",
+ help = "override \"mnt-by:\" value")
+ parser.add_argument("-n", "--notify", default = whoami,
+ help = "override \"notify:\" value")
+ parser.add_argument("-p", "--password",
+ help = "specify \"override:\" password")
+ parser.add_argument("-s", "--source", default = "RPKI",
+ help = "override \"source:\" value")
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument("-e", "--email", action = "store_true",
+ help = "generate fake RFC 822 header suitable for piping to irr_rpsl_submit")
+ group.add_argument("-o", "--output",
+ help = "write route and route6 objects to directory OUTPUT, one object per file")
+ parser.add_argument("authenticated_directory",
+ help = "directory tree containing authenticated rcynic output")
+ args = parser.parse_args()
+
+ if not os.path.isdir(args.authenticated_directory):
+ sys.exit("\"%s\" is not a directory" % args.authenticated_directory)
+
+ routes = route_list(args.authenticated_directory)
+
+ if args.output:
+ if not os.path.isdir(args.output):
+ os.makedirs(args.output)
+ for r in routes:
+ r.write(args.output)
+ else:
+ email_header(sys.stdout)
+ for r in routes:
+ sys.stdout.write("%s\n" % r)
+
+if __name__ == "__main__":
+ main()
diff --git a/potpourri/rp-sqlite b/potpourri/rp-sqlite
new file mode 100755
index 00000000..ee43096d
--- /dev/null
+++ b/potpourri/rp-sqlite
@@ -0,0 +1,425 @@
+#!/usr/local/bin/python
+
+# $Id$
+
+# Copyright (C) 2013 Dragon Research Labs ("DRL")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+# Preliminary script to work out what's involved in building an
+# SQLite3 database of RP objects. We haven't bothered with this until
+# now in rcynic, because we mostly just walk the filesystem tree, but
+# LTA and some of the ideas Tim is playing with require a lot of
+# lookups based on things that are not the URIs we use as filenames,
+# so some kind of indexing may become necessary. Given the complexity
+# of building any kind of real index over RFC 3779 resources,
+# otherwise fine lightweight tools like the Python shelve library
+# probably won't cut it here, and I don't want to add a dependency on
+# MySQL on the RP side (yet?), so let's see what we can do with SQLite3.
+
+import os
+import sys
+import yaml
+import base64
+import sqlite3
+import weakref
+import rpki.POW
+import rpki.x509
+import rpki.resource_set
+
+sqlite3.register_adapter(rpki.POW.IPAddress,
+ lambda x: buffer("_" + x.toBytes()))
+
+sqlite3.register_converter("RangeVal",
+ lambda s: long(s) if s.isdigit() else rpki.POW.IPAddress.fromBytes(s[1:]))
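
A note on the two registrations above, since the "_" prefix is easy to miss: range endpoints are either ASNs or IP addresses, and both land in columns declared as RangeVal. ASNs are stored as plain integers, while IP addresses are stored as a buffer whose leading "_" byte guarantees the stored value is never all digits -- which is exactly the test the converter uses to tell the two apart. A rough round-trip sketch, for illustration only:

    addr = rpki.POW.IPAddress("192.0.2.1")
    stored = buffer("_" + addr.toBytes())                     # what the adapter writes
    restored = rpki.POW.IPAddress.fromBytes(str(stored)[1:])  # what the converter does
    assert restored.toBytes() == addr.toBytes()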
+
+
+def main():
+ rpdb = RPDB()
+ rpdb.load()
+ test(rpdb)
+ rpdb.close()
+
+def test(rpdb):
+ fn2s = [None] + rpdb.fn2map.keys()
+
+ print
+ print "Testing YAML parsing"
+ parse_yaml(rpdb)
+
+ print
+ print "Looking for certificates without AKI"
+ for r in rpdb.find_by_aki(None, "cer"):
+ print r, r.uris
+ print
+ print "Testing range functions"
+ for fn2 in fn2s:
+ if fn2 is not None:
+ print
+ print "Restricting search to type", fn2
+ print
+ print "Looking for range that should include adrilankha and psg again"
+ for r in rpdb.find_by_range("147.28.0.19", "147.28.0.62", fn2):
+ print r, r.uris
+ print
+ print "Looking for range that should include adrilankha"
+ for r in rpdb.find_by_range("147.28.0.19", "147.28.0.19", fn2):
+ print r, r.uris
+ print
+ print "Looking for range that should include ASN 3130"
+ for r in rpdb.find_by_range(3130, 3130, fn2):
+ print r, r.uris
+ print
+ print "Moving on to resource sets"
+ for fn2 in fn2s:
+ if fn2 is not None:
+ print
+ print "Restricting search to type", fn2
+ for expr in ("147.28.0.19-147.28.0.62",
+ "3130",
+ "2001:418:1::19/128",
+ "147.28.0.19-147.28.0.62,198.180.150.50/32",
+ "3130,147.28.0.19-147.28.0.62,198.180.150.50/32",
+ "2001:418:1::62/128,198.180.150.50/32,2001:418:8006::50/128",
+ "147.28.0.19-147.28.0.62,2001:418:1::19/128,2001:418:1::62/128,198.180.150.50/32,2001:418:8006::50/128"):
+ print
+ print "Trying", expr
+ for r in rpdb.find_by_resource_bag(rpki.resource_set.resource_bag.from_str(expr), fn2):
+ print r, r.uris
+
+
+def parse_xki(s):
+ """
+ Parse text form of an SKI or AKI. We accept two encodings:
+ colon-delimited hexadecimal, and URL-safe Base64. The former is
+ what OpenSSL prints in its text representation of SKI and AKI
+ extensions; the latter is the g(SKI) value that some RPKI CA engines
+ (including rpkid) use when constructing filenames.
+
+ In either case, we check that the decoded result contains the right
+ number of octets to be a SHA-1 hash.
+ """
+
+ if ":" in s:
+ b = "".join(chr(int(c, 16)) for c in s.split(":"))
+ else:
+ b = base64.urlsafe_b64decode(s + ("=" * (4 - len(s) % 4)))
+ if len(b) != 20:
+ raise RuntimeError("Bad length for SHA1 xKI value: %r" % s)
+ return b
+
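
A quick round-trip sketch of the two encodings parse_xki() accepts (illustration only; the digest here is random bytes, not a real SKI):

    import base64, os
    raw = os.urandom(20)                                  # stand-in for a SHA-1 digest
    hex_form = ":".join("%02X" % ord(c) for c in raw)     # OpenSSL text style
    b64_form = base64.urlsafe_b64encode(raw).rstrip("=")  # g(SKI) style
    assert parse_xki(hex_form) == raw
    assert parse_xki(b64_form) == raw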
+
+def parse_yaml(rpdb, fn = "rp-sqlite.yaml"):
+ yy = yaml.safe_load(open(fn, "r"))
+ for y in yy:
+
+ ski = None
+ uri = None
+ obj = set()
+
+ print
+
+ if "ski" in y:
+ ski = parse_xki(y["ski"])
+ obj.update(rpdb.find_by_ski(ski))
+ if "uri" in y:
+ uri = y["uri"]
+ obj.update(rpdb.find_by_uri(uri))
+ if len(obj) == 1:
+ obj = obj.pop()
+ else:
+ raise RuntimeError("Constraint entry must name a unique object using SKI, URI, or both (%r, %r, %r)" % (
+ ski, uri, obj))
+
+ print "URI:", uri
+ print "SKI:", " ".join("%02X" % ord(c) for c in ski), "(" + y["ski"] + ")"
+
+ new_resources = old_resources = obj.get_3779resources()
+
+ if "set" in y:
+ new_resources = rpki.resource_set.resource_bag.from_str(y["set"])
+
+ if "add" in y:
+ new_resources = new_resources | rpki.resource_set.resource_bag.from_str(y["add"])
+
+ if "sub" in y:
+ new_resources = new_resources - rpki.resource_set.resource_bag.from_str(y["sub"])
+
+ if new_resources == old_resources:
+ print "No resource change, skipping"
+ continue
+
+ print "Old:", old_resources
+ print "New:", new_resources
+ print "Add:", new_resources - old_resources
+ print "Sub:", old_resources - new_resources
+
+ # See draft-ietf-sidr-ltamgmt-08.txt for real processing details, but overview:
+ #
+ # - Process constraints file as above to determine list of target
+ # certificates (2.1). May need to add more fields to YAML hash
+ # for things like CP, CRLDP, etc, although I'm not entirely sure
+ # yet which of those it really makes sense to tweak via
+ # constraints.
+ #
+ # - Use resources from selected target certificates to determine
+ # which additional certificates we need to reissue to remove those
+ # resources (2.2, "perforation"). In theory we already have SQL
+ # that will just locate all of these for us.
+ #
+ # - Figure out which trust anchors to process (2.3, TA
+ # re-parenting); we can look in SQL for NULL AKI, but that's just
+ # a hint, we either have to verify that rcynic accepted those TAs
+ # or we have to look at the TALs. Looking at TALs is probably
+ # easier.
+ #
+ # At some point we probably need to parse the constraints file into
+ # Constraints objects or something like that, except that we may
+ # really need something more general that will accommodate
+ # perforation and TA reparenting as well. Figure out and refactor
+ # as we go along, most likely.
+
+
+class RPDB(object):
+ """
+ Relying party database.
+
+ For now just wire in the database name and rcynic root, fix this
+ later if overall approach seems usable. Might even end up just
+ being an in-memory SQL database, who knows?
+ """
+
+ fn2map = dict(cer = rpki.x509.X509,
+ crl = rpki.x509.CRL,
+ mft = rpki.x509.SignedManifest,
+ roa = rpki.x509.ROA,
+ gbr = rpki.x509.Ghostbuster)
+
+ def __init__(self, db_name = "rp-sqlite.db", delete_old_db = True):
+
+ if delete_old_db:
+ try:
+ os.unlink(db_name)
+ except:
+ pass
+
+ exists = os.path.exists(db_name)
+
+ self.db = sqlite3.connect(db_name, detect_types = sqlite3.PARSE_DECLTYPES)
+ self.db.text_factory = str
+ self.cur = self.db.cursor()
+
+ self.cache = weakref.WeakValueDictionary()
+
+ if exists:
+ return
+
+ self.cur.executescript('''
+ PRAGMA foreign_keys = on;
+
+ CREATE TABLE object (
+ id INTEGER PRIMARY KEY NOT NULL,
+ der BLOB NOT NULL,
+ fn2 TEXT NOT NULL,
+ ski BLOB,
+ aki BLOB,
+ inherits BOOLEAN NOT NULL,
+ UNIQUE (der));
+
+ CREATE TABLE uri (
+ id INTEGER NOT NULL,
+ uri TEXT NOT NULL,
+ UNIQUE (uri),
+ FOREIGN KEY (id) REFERENCES object(id)
+ ON DELETE CASCADE
+ ON UPDATE CASCADE);
+
+ CREATE INDEX uri_index ON uri(id);
+
+ CREATE TABLE range (
+ id INTEGER NOT NULL,
+ min RangeVal NOT NULL,
+ max RangeVal NOT NULL,
+ UNIQUE (id, min, max),
+ FOREIGN KEY (id) REFERENCES object(id)
+ ON DELETE CASCADE
+ ON UPDATE CASCADE);
+
+ CREATE INDEX range_index ON range(min, max);
+ ''')
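
To make the schema concrete: an object whose EE certificate holds AS64511 plus 192.0.2.0/24 (hypothetical values) would end up as one object row and two range rows, roughly:

    # object: (id=1, der=<DER>, fn2='roa', ski=<SKI>, aki=<AKI>, inherits=0)
    # range:  (id=1, min=64511,     max=64511)          -- the ASN
    # range:  (id=1, min=192.0.2.0, max=192.0.2.255)    -- the prefix, stored as
    #                                                      its min/max addresses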
+
+ def load(self,
+ rcynic_root = os.path.expanduser("~/rpki/subvert-rpki.hactrn.net/trunk/"
+ "rcynic/rcynic-data/unauthenticated"),
+ spinner = 100):
+
+ nobj = 0
+
+ for root, dirs, files in os.walk(rcynic_root):
+ for fn in files:
+ fn = os.path.join(root, fn)
+ fn2 = os.path.splitext(fn)[1][1:]
+
+ try:
+ obj = self.fn2map[fn2](DER_file = fn)
+ except:
+ continue
+
+ if spinner and nobj % spinner == 0:
+ sys.stderr.write("\r%s %d..." % ("|\\-/"[(nobj/spinner) & 3], nobj))
+
+ nobj += 1
+
+ inherits = False
+
+ if fn2 == "crl":
+ ski = None
+ aki = buffer(obj.get_AKI())
+ cer = None
+ bag = None
+
+ else:
+ if fn2 == "cer":
+ cer = obj
+ else:
+ cer = rpki.x509.X509(POW = obj.get_POW().certs()[0])
+ ski = buffer(cer.get_SKI())
+ try:
+ aki = buffer(cer.get_AKI())
+ except:
+ aki = None
+ bag = cer.get_3779resources()
+ inherits = bag.asn.inherit or bag.v4.inherit or bag.v6.inherit
+
+ der = buffer(obj.get_DER())
+ uri = "rsync://" + fn[len(rcynic_root) + 1:]
+
+ try:
+ self.cur.execute("INSERT INTO object (der, fn2, ski, aki, inherits) VALUES (?, ?, ?, ?, ?)",
+ (der, fn2, ski, aki, inherits))
+ rowid = self.cur.lastrowid
+
+ except sqlite3.IntegrityError:
+ self.cur.execute("SELECT id FROM object WHERE der = ? AND fn2 = ?", (der, fn2))
+ rows = self.cur.fetchall()
+ rowid = rows[0][0]
+ assert len(rows) == 1
+
+ else:
+ if bag is not None:
+ for rset in (bag.asn, bag.v4, bag.v6):
+ if rset is not None:
+ self.cur.executemany("REPLACE INTO range (id, min, max) VALUES (?, ?, ?)",
+ ((rowid, i.min, i.max) for i in rset))
+
+ self.cur.execute("INSERT INTO uri (id, uri) VALUES (?, ?)",
+ (rowid, uri))
+
+ if spinner:
+ sys.stderr.write("\r= %d objects, committing..." % nobj)
+
+ self.db.commit()
+
+ if spinner:
+ sys.stderr.write("done.\n")
+
+
+ def find_by_ski(self, ski, fn2 = None):
+ if ski is None:
+ return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE ski IS NULL")
+ else:
+ return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE ski = ?", [buffer(ski)])
+
+ def find_by_aki(self, aki, fn2 = None):
+ if aki is None:
+ return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE aki IS NULL")
+ else:
+ return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE aki = ?", [buffer(aki)])
+
+ def find_by_uri(self, uri):
+ return self._find_results(None, "SELECT object.id, fn2, der FROM object, uri WHERE uri.uri = ? AND object.id = uri.id", [uri])
+
+
+ # It's easiest to understand overlap conditions by understanding
+ # non-overlap, then inverting and applying De Morgan's law. Ranges
+ # A and B do not overlap if either A.min > B.max or A.max < B.min;
+ # therefore they do overlap if A.min <= B.max and A.max >= B.min.
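
A tiny self-contained check of that overlap rule, for illustration (plain integers standing in for range endpoints):

    def overlaps(a_min, a_max, b_min, b_max):
        return a_min <= b_max and a_max >= b_min

    assert overlaps(10, 20, 15, 30)        # partial overlap
    assert overlaps(10, 20, 12, 18)        # containment
    assert not overlaps(10, 20, 21, 30)    # disjoint, even though adjacent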
+
+ def find_by_range(self, range_min, range_max = None, fn2 = None):
+ if range_max is None:
+ range_max = range_min
+ if isinstance(range_min, (str, unicode)):
+ range_min = long(range_min) if range_min.isdigit() else rpki.POW.IPAddress(range_min)
+ if isinstance(range_max, (str, unicode)):
+ range_max = long(range_max) if range_max.isdigit() else rpki.POW.IPAddress(range_max)
+ assert isinstance(range_min, (int, long, rpki.POW.IPAddress))
+ assert isinstance(range_max, (int, long, rpki.POW.IPAddress))
+ return self._find_results(
+ fn2,
+ """
+ SELECT object.id, fn2, der FROM object, range
+ WHERE ? <= max AND ? >= min AND object.id = range.id
+ """,
+ [range_min, range_max])
+
+
+ def find_by_resource_bag(self, bag, fn2 = None):
+ assert bag.asn or bag.v4 or bag.v6
+ qset = []
+ aset = []
+ for rset in (bag.asn, bag.v4, bag.v6):
+ if rset:
+ for r in rset:
+ qset.append("(? <= max AND ? >= min)")
+ aset.append(r.min)
+ aset.append(r.max)
+ return self._find_results(
+ fn2,
+ """
+ SELECT object.id, fn2, der FROM object, range
+ WHERE object.id = range.id AND (%s)
+ """ % (" OR ".join(qset)),
+ aset)
+
+
+ def _find_results(self, fn2, query, args = None):
+ if args is None:
+ args = []
+ if fn2 is not None:
+ assert fn2 in self.fn2map
+ query += " AND fn2 = ?"
+ args.append(fn2)
+ query += " GROUP BY object.id"
+ results = []
+ self.cur.execute(query, args)
+ selections = self.cur.fetchall()
+ for rowid, fn2, der in selections:
+ if rowid in self.cache:
+ obj = self.cache[rowid]
+ else:
+ obj = self.fn2map[fn2](DER = der)
+ self.cur.execute("SELECT uri FROM uri WHERE id = ?", (rowid,))
+ obj.uris = [u[0] for u in self.cur.fetchall()]
+ obj.uri = obj.uris[0] if len(obj.uris) == 1 else None
+ self.cache[rowid] = obj
+ results.append(obj)
+ return results
+
+
+ def close(self):
+ self.cur.close()
+ self.db.close()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/potpourri/rp-sqlite.yaml b/potpourri/rp-sqlite.yaml
new file mode 100644
index 00000000..4c0a0b8d
--- /dev/null
+++ b/potpourri/rp-sqlite.yaml
@@ -0,0 +1,53 @@
+- ski: B8:14:5D:13:53:7D:AE:6E:E2:E3:95:84:A8:99:EB:7D:1A:7D:E5:DF
+ uri: rsync://repo0.rpki.net/rpki/root.cer
+ add: 10.0.0.44/32
+
+- ski: A2:B3:2A:99:20:07:7A:E9:A4:9F:3F:02:F2:32:F9:3D:54:F8:7E:78
+ uri: rsync://repo0.rpki.net/rpki/root/iana.cer
+ sub: 10.0.0.0/8
+
+- ski: 3NYgwt_HYV91MeCGLWdUL4uq65M
+ uri: rsync://repo0.rpki.net/rpki/root/iana/5/3NYgwt_HYV91MeCGLWdUL4uq65M.cer
+ add: 10.0.0.0/8
+
+- ski: 3YFTaQOUkPQfhckX_ikYzy0mR9s
+ uri: rsync://repo0.rpki.net/rpki/root/iana/5/3YFTaQOUkPQfhckX_ikYzy0mR9s.cer
+
+- ski: 7ew1d5WFCSfhd8lnpmjwOohS_DQ
+ uri: rsync://repo0.rpki.net/rpki/root/iana/5/7ew1d5WFCSfhd8lnpmjwOohS_DQ.cer
+
+- ski: PWxDsvUgDdeloE3eQfceV8vbIyg
+ uri: rsync://repo0.rpki.net/rpki/root/iana/5/PWxDsvUgDdeloE3eQfceV8vbIyg.cer
+
+- ski: WnOFfpqobEKxzmvddJue3PXsEIQ
+ uri: rsync://repo0.rpki.net/rpki/root/iana/5/WnOFfpqobEKxzmvddJue3PXsEIQ.cer
+
+- ski: m6TQTKwKYyVva-Yq__I-Zz25Vns
+ uri: rsync://repo0.rpki.net/rpki/root/iana/5/m6TQTKwKYyVva-Yq__I-Zz25Vns.cer
+
+- ski: 8U5wQ47aZuuOXYPGX5BPvlcTfNg
+ uri: rsync://repo0.rpki.net/rpki/root/iana/ripe/8/8U5wQ47aZuuOXYPGX5BPvlcTfNg.cer
+
+- ski: bccxGl4Xl4ur3nd1fwQ-1QIwtNA
+ uri: rsync://repo0.rpki.net/rpki/root/iana/ripe/8/bccxGl4Xl4ur3nd1fwQ-1QIwtNA.cer
+
+- ski: zbXifbEkZNmOVhYZTCZFfLPxhjM
+ uri: rsync://repo0.rpki.net/rpki/root/iana/ripe/8/zbXifbEkZNmOVhYZTCZFfLPxhjM.cer
+
+- ski: XYjTToHopYme-hlwgUyUyYRD_VQ
+ uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/XYjTToHopYme-hlwgUyUyYRD_VQ.cer
+
+- ski: _3I3i3uVmnliCinb2STR2MaxuU8
+ uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/_3I3i3uVmnliCinb2STR2MaxuU8.cer
+
+- ski: qdtoqOMCNSOdYuNNC7ya3dUaPl4
+ uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/qdtoqOMCNSOdYuNNC7ya3dUaPl4.cer
+
+- ski: yq4x9MteoM0DQYTh6NLbbmf--QY
+ uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/yq4x9MteoM0DQYTh6NLbbmf--QY.cer
+
+- ski: j2TDGclJnZ7mXpZCQJS0cfkOL34
+ uri: rsync://repo0.rpki.net/rpki/root/iana/apnic/7/j2TDGclJnZ7mXpZCQJS0cfkOL34.cer
+
+- ski: yRk89OOx4yyJHJ6Z1JLLnk0_oDc
+ uri: rsync://repo0.rpki.net/rpki/root/iana/apnic/7/yRk89OOx4yyJHJ6Z1JLLnk0_oDc.cer
diff --git a/potpourri/rpki b/potpourri/rpki
new file mode 120000
index 00000000..168548eb
--- /dev/null
+++ b/potpourri/rpki
@@ -0,0 +1 @@
+../rpkid/rpki \ No newline at end of file
diff --git a/potpourri/rpkidemo b/potpourri/rpkidemo
new file mode 100755
index 00000000..fdb4e1bb
--- /dev/null
+++ b/potpourri/rpkidemo
@@ -0,0 +1,495 @@
+#!/usr/bin/env python
+
+"""
+Hosted GUI client startup script, for workshops, etc.
+
+By the time this is run, we assume that the tarball (contents TBD and
+perhaps changing from one workshop to another) has been unpacked, that
+we are on some Unix-like machine, and that we are executing in a Python
+interpreter. Anything else we care about, we have to check for ourselves.
+
+In what we hope is the most common case, this script should be run
+with no options.
+
+$Id$
+
+Copyright (C) 2010 Internet Systems Consortium ("ISC")
+
+Permission to use, copy, modify, and distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+"""
+
+# Check Python version before doing anything else
+
+import sys
+
+python_version = sys.version_info[:2]
+
+have_ssl_module = python_version >= (2, 6)
+
+if python_version == (2, 5):
+ print """
+ WARNING WARNING WARNING
+
+ You are running Python version 2.5, which does not include
+ real SSL support. This means that sessions created by this
+ script will be vulnerable to monkey-in-the-middle attacks.
+
+ Python 2.6 does not have this problem.
+ """
+ while True:
+ answer = raw_input("Are you SURE you want to proceed? (yes/NO) ").strip().lower()
+ if answer in ("", "n", "no"):
+ sys.exit("You have chosen wisely")
+ elif answer in ("y", "yes"):
+ print "You have been warned"
+ break
+ else:
+ print 'Please answer "yes" or "no"'
+
+elif have_ssl_module:
+ try:
+ import ssl
+ except ImportError:
+ sys.exit("You're running Python 2.6+, but I can't find the ssl module, so you have no SSL support at all, argh!")
+
+else:
+ sys.exit("Sorry, this script requires Python 2.6+, I seem to be running in %s" % sys.version)
+
+# Ok, it's safe to import the other stuff we need now
+
+import os, subprocess, webbrowser, urllib2, getpass, re, errno, time, email.utils, httplib, socket, getopt, urllib, cookielib
+import tempfile
+from xml.etree.ElementTree import fromstring as ElementFromString
+
+def save(filename, data):
+ """
+ Save data to a file.
+ """
+
+ tempname = "%s.%d.tmp" % (filename, os.getpid())
+ f = open(tempname, "w")
+ f.write(data)
+ f.close()
+ os.rename(tempname, filename)
+
+def save_error(err):
+ """
+ Save the data from the file-like object "err" into a temporary file
+ and open a web browser to view the result.
+ """
+
+ with tempfile.NamedTemporaryFile(prefix = "rpkidemo-error", suffix = ".html", delete = False) as tmpf:
+ tmpf.write(err.read())
+
+ # Save the filename for use outside the with statement; leaving the
+ # with block closes (and flushes) the file before we invoke the
+ # web browser.
+ fname = tmpf.name
+
+ sys.stderr.write("errors saved in %s\n" % fname)
+ webbrowser.open("file://" + fname)
+
+class CSV_File(object):
+ """
+ A CSV file that's being maintained by the GUI but being monitored,
+ downloaded, and used here.
+ """
+
+ def __init__(self, filename, url):
+ self.filename = filename
+ self.url = url
+ try:
+ self.timestamp = os.stat(filename).st_mtime
+ except:
+ self.store(0, "")
+
+ def last_modified(self):
+ """
+ Return CSV file timestamp formatted for use with HTTP.
+ """
+ return email.utils.formatdate(self.timestamp, False, True)
+
+ def store(self, timestamp, data):
+ """
+ Save CSV file, and record new timestamp.
+ """
+ save(self.filename, data)
+ self.timestamp = timestamp
+ os.utime(self.filename, (time.time(), timestamp))
+
+
+class AbstractHTTPSConnection(httplib.HTTPSConnection):
+ """
+ Customization of httplib.HTTPSConnection to enable certificate
+ validation.
+
+ This is an abstract class; the subclass must set trust_anchor to the
+ filename of a trust anchor file in the format that the ssl module
+ expects.
+ """
+
+ trust_anchor = None
+
+ def connect(self):
+ assert self.trust_anchor is not None
+ sock = socket.create_connection((self.host, self.port), self.timeout)
+ if getattr(self, "_tunnel_host", None):
+ self.sock = sock
+ self._tunnel()
+ self.sock = ssl.wrap_socket(sock,
+ keyfile = self.key_file,
+ certfile = self.cert_file,
+ cert_reqs = ssl.CERT_REQUIRED,
+ ssl_version = ssl.PROTOCOL_TLSv1,
+ ca_certs = self.trust_anchor)
+
+
+class main(object):
+ """
+ Main program.
+ """
+
+ # Environmental parameters
+
+ top = os.path.realpath(os.path.join((sys.path[0] or "."), ".."))
+ cwd = os.getcwd()
+
+ # Parameters that we might want to get from a config file.
+ # Just wire them all in for the moment.
+
+ base_url = "https://demo.rpki.net/"
+ myrpki_url = base_url + "rpki/"
+ auth_url = myrpki_url + "demo/login"
+ example_myrpki_cfg = "%s/rpkid/examples/rpki.conf" % top
+ working_dir = "%s/rpkidemo-data" % cwd
+ myrpki_py = "%s/rpkid/myrpki.py" % top
+ user_agent = "RPKIDemo"
+ delay = 15
+ trust_anchor = "%s/scripts/rpkidemo.pem" % top
+
+ openssl = None
+
+ def setup_openssl(self):
+ """
+ Find a usable version of OpenSSL, or build one if we must.
+ """
+
+ def scrape(*args):
+ return subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT).communicate()[0]
+
+ def usable_openssl(f):
+ return f is not None and os.path.exists(f) and "-ss_cert" in scrape(f, "ca", "-?") and "Usage cms" in scrape(f, "cms", "-?")
+
+ for d in os.environ["PATH"].split(":"):
+ f = os.path.join(d, "openssl")
+ if usable_openssl(f):
+ self.openssl = f
+ break
+
+ if self.openssl is None:
+ print "Couldn't find usable openssl on path, attempting to build one"
+ subprocess.check_call(("./configure",), cwd = self.top)
+ subprocess.check_call(("make",), cwd = os.path.join(self.top, "openssl"))
+ self.openssl = os.path.join(self.top, "openssl", "openssl", "apps", "openssl")
+ print "Done building openssl"
+ print
+
+ if usable_openssl(self.openssl):
+ print "Using", self.openssl
+ else:
+ sys.exit("Could not find or build usable version of openssl, giving up")
+
+ @staticmethod
+ def setup_utc():
+ """
+ This script thinks in UTC.
+ """
+
+ os.environ["TZ"] = "UTC"
+ time.tzset()
+
+ def setup_username(self):
+ """
+ Get username and password for web interface, construct urllib2
+ "opener" tailored for our use, perform an initial GET (ignoring
+ result, other than exceptions) to test the username and password.
+ """
+
+ print "I need to know your username and password on the Django GUI server to proceed"
+
+ while True:
+
+ try:
+ self.username = raw_input("Username: ")
+ self.password = getpass.getpass()
+
+ handlers = []
+
+ self.cookiejar = cookielib.CookieJar()
+ handlers.append(urllib2.HTTPCookieProcessor(self.cookiejar))
+
+ if have_ssl_module:
+
+ class HTTPSConnection(AbstractHTTPSConnection):
+ trust_anchor = self.trust_anchor
+
+ class HTTPSHandler(urllib2.HTTPSHandler):
+ def https_open(self, req):
+ return self.do_open(HTTPSConnection, req)
+
+ handlers.append(HTTPSHandler)
+
+ self.opener = urllib2.build_opener(*handlers)
+
+ # Test login credentials
+ resp = self.opener.open(self.auth_url) # GET
+
+ r = self.opener.open(urllib2.Request(
+ url = self.auth_url,
+ data = urllib.urlencode({ "username" : self.username,
+ "password" : self.password,
+ "csrfmiddlewaretoken" : self.csrftoken() }),
+ headers = { "Referer" : self.auth_url,
+ "User-Agent" : self.user_agent})) # POST
+ return
+
+ except urllib2.URLError, e:
+ print "Could not log in to server: %s" % e
+ print "Please try again"
+ save_error(e)
+
+ def csrftoken(self):
+ """
+ Pull Django's CSRF token from the cookie database.
+
+ Django's login form requires the "csrfmiddlewaretoken." It turns out
+ this is the same value as the "csrftoken" cookie, so we don't need
+ to bother parsing the form.
+ """
+
+ return [c.value for c in self.cookiejar if c.name == "csrftoken"][0]
+
+ def setup_working_directory(self):
+ """
+ Create working directory and move to it.
+ """
+
+ try:
+ print "Creating", self.working_dir
+ os.mkdir(self.working_dir)
+ except OSError, e:
+ if e.errno != errno.EEXIST:
+ raise
+ print self.working_dir, "already exists, reusing it"
+ os.chdir(self.working_dir)
+
+ def setup_config_file(self):
+ """
+ Generate rpki.conf
+ """
+
+ if os.path.exists("rpki.conf"):
+ print "You already have a rpki.conf file, so I will use it"
+ return
+
+ print "Generating rpki.conf"
+ section_regexp = re.compile("\s*\[\s*(.+?)\s*\]\s*$")
+ variable_regexp = re.compile("\s*([-a-zA-Z0-9_]+)\s*=\s*(.+?)\s*$")
+ f = open("rpki.conf", "w")
+ f.write("# Automatically generated, do not edit\n")
+ section = None
+ for line in open(self.example_myrpki_cfg):
+ m = section_regexp.match(line)
+ if m:
+ section = m.group(1)
+ m = variable_regexp.match(line)
+ option = m.group(1) if m and section == "myrpki" else None
+ value = m.group(2) if option else None
+ if option == "handle":
+ line = "handle = %s\n" % self.username
+ if option == "openssl":
+ line = "openssl = %s\n" % self.openssl
+ if option in ("run_rpkid", "run_pubd", "run_rootd") and value != "false":
+ line = "%s = false\n" % option
+ f.write(line)
+ f.close()
+
+ def myrpki(self, *cmd):
+ """
+ Run a myrpki command.
+ """
+ return subprocess.check_call((sys.executable, self.myrpki_py) + cmd)
+
+ def upload(self, url, filename):
+ """
+ Upload filename to URL, return result.
+ """
+
+ url = "%s%s/%s" % (self.myrpki_url, url, self.username)
+ data = open(filename).read()
+ print "Uploading", filename, "to", url
+ post_data = urllib.urlencode({
+ "content" : data,
+ "csrfmiddlewaretoken" : self.csrftoken() }) # POST
+ try:
+ return self.opener.open(urllib2.Request(url, post_data, {
+ "User-Agent" : self.user_agent,
+ "Referer" : url}))
+ except urllib2.HTTPError, e:
+ sys.stderr.write("Problem uploading to URL %s\n" % url)
+ save_error(e)
+ raise
+
+ def update(self):
+ """
+ Run configure_resources, upload result, download updated result.
+ """
+
+ self.myrpki("configure_resources")
+ r = self.upload("demo/myrpki-xml", "myrpki.xml")
+ save("myrpki.xml", r.read())
+
+ def setup_csv_files(self):
+ """
+ Create CSV file objects and synchronize timestamps.
+ """
+
+ self.csv_files = [
+ CSV_File("asns.csv", "demo/down/asns/%s" % self.username),
+ CSV_File("prefixes.csv", "demo/down/prefixes/%s" % self.username),
+ CSV_File("roas.csv", "demo/down/roas/%s" % self.username) ]
+
+ def upload_for_response(self, url, path):
+ """
+ Upload an XML file to the requested URL and wait for the server
+ to signal that a response is ready.
+ """
+
+ self.upload(url, path)
+
+ print """
+ Waiting for response to upload. This may require action by a human
+ being on the server side, so it may take a while, please be patient.
+ """
+
+ while True:
+ try:
+ return self.opener.open(urllib2.Request(
+ "%s%s/%s" % (self.myrpki_url, url, self.username),
+ None,
+ { "User-Agent" : self.user_agent }))
+ except urllib2.HTTPError, e:
+ # Portal GUI uses response code 503 to signal "not ready"
+ if e.code != 503:
+ sys.stderr.write("Problem getting response from %s: %s\n" % (url, e))
+ save_error(e)
+ raise
+ time.sleep(self.delay)
+
+ def setup_parent(self):
+ """
+ Upload the user's identity.xml and wait for the portal gui to send
+ back the parent.xml response.
+ """
+
+ r = self.upload_for_response("demo/parent-request", "entitydb/identity.xml")
+ parent_data = r.read()
+ save("parent.xml", parent_data)
+ self.myrpki("configure_parent", "parent.xml")
+
+ # Extract the parent_handle from the xml response and save it for use by
+ # setup_repository()
+ self.parent_handle = ElementFromString(parent_data).get("parent_handle")
+
+ def setup_repository(self):
+ """
+ Upload the repository referral to the portal-gui and wait for the
+ response from the repository operator.
+ """
+
+ r = self.upload_for_response("demo/repository-request", "entitydb/repositories/%s.xml" % self.parent_handle)
+ save("repository.xml", r.read())
+ self.myrpki("configure_repository", "repository.xml")
+
+ def poll(self, csv_file):
+ """
+ Poll for new version of a CSV file, save if changed, return
+ boolean indicating whether file has changed.
+ """
+
+ try:
+ url = self.myrpki_url + csv_file.url
+ r = self.opener.open(urllib2.Request(url, None, {
+ "If-Modified-Since" : csv_file.last_modified(),
+ "User-Agent" : self.user_agent }))
+ timestamp = time.mktime(r.info().getdate("Last-Modified"))
+ csv_file.store(timestamp, r.read())
+ return True
+ except urllib2.HTTPError, e:
+ if e.code == 304: # 304 == "Not Modified"
+ return False
+ else:
+ sys.stderr.write("Problem polling URL %s\n" % url)
+ save_error(e)
+ raise
+
+ def poll_loop(self):
+ """
+ Loop forever, polling for updates.
+ """
+
+ while True:
+ changed = False
+ for csv_file in self.csv_files:
+ if self.poll(csv_file):
+ changed = True
+ if changed:
+ self.update()
+ time.sleep(self.delay)
+
+ def getopt(self):
+ """
+ Parse options.
+ """
+ opts, argv = getopt.getopt(sys.argv[1:], "hi?", ["help"])
+ for o, a in opts:
+ if o in ("-h", "--help", "-?"):
+ print __doc__
+ sys.exit(0)
+ if argv:
+ sys.exit("Unexpected arguments %r" % (argv,))
+
+ def __init__(self):
+ self.getopt()
+ self.setup_utc()
+ self.setup_openssl()
+ self.setup_username()
+ self.setup_working_directory()
+ self.setup_config_file()
+ self.setup_csv_files()
+ self.myrpki("initialize")
+ self.setup_parent()
+ self.setup_repository()
+ self.update()
+ self.update()
+
+ webbrowser.open(self.myrpki_url)
+
+ self.poll_loop()
+
+main()
+
+# Local Variables:
+# mode:python
+# End:
+
+# vim:sw=2 ts=8 expandtab
diff --git a/potpourri/rpkidemo.pem b/potpourri/rpkidemo.pem
new file mode 100644
index 00000000..f96fdb9e
--- /dev/null
+++ b/potpourri/rpkidemo.pem
@@ -0,0 +1,23 @@
+-----BEGIN CERTIFICATE-----
+MIIDxjCCAy+gAwIBAgIJALc3/mkoVFOMMA0GCSqGSIb3DQEBBQUAMIGaMR4wHAYD
+VQQDExVSR25ldC9QU0duZXQgU2VjdXJpdHkxCzAJBgNVBAYTAlVTMQswCQYDVQQI
+EwJXQTEaMBgGA1UEBxMRQmFpbmJyaWRnZSBJc2xhbmQxEzARBgNVBAoTClJHbmV0
+LCBMTEMxDzANBgNVBAsTBlBTR25ldDEcMBoGCSqGSIb3DQEJARYNcmFuZHlAcHNn
+LmNvbTAeFw0xMTAyMjcwNDMxMzhaFw0yMTAyMjQwNDMxMzhaMIGaMR4wHAYDVQQD
+ExVSR25ldC9QU0duZXQgU2VjdXJpdHkxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJX
+QTEaMBgGA1UEBxMRQmFpbmJyaWRnZSBJc2xhbmQxEzARBgNVBAoTClJHbmV0LCBM
+TEMxDzANBgNVBAsTBlBTR25ldDEcMBoGCSqGSIb3DQEJARYNcmFuZHlAcHNnLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAw/lD46076mS4vDHI943dlNPh
+KN1EY0wnx7nDga7jh4x8U5gt+MdvdH+kRABR7KVjkaUYKcd+DlAQeNLYXXkXOECz
+yN90PgADWucPij6GJn7o9xwNsh2yAhICzqc19RgiKPVJL9V5xWBwKgzkpGG7dcUm
+97TjZwhj8B8xcHjVbJ8CAwEAAaOCARAwggEMMAwGA1UdEwQFMAMBAf8wCwYDVR0P
+BAQDAgEGMB0GA1UdDgQWBBRUmkatFo7oAUl5SJqUCfAC0LpkgDCBzwYDVR0jBIHH
+MIHEgBRUmkatFo7oAUl5SJqUCfAC0LpkgKGBoKSBnTCBmjEeMBwGA1UEAxMVUkdu
+ZXQvUFNHbmV0IFNlY3VyaXR5MQswCQYDVQQGEwJVUzELMAkGA1UECBMCV0ExGjAY
+BgNVBAcTEUJhaW5icmlkZ2UgSXNsYW5kMRMwEQYDVQQKEwpSR25ldCwgTExDMQ8w
+DQYDVQQLEwZQU0duZXQxHDAaBgkqhkiG9w0BCQEWDXJhbmR5QHBzZy5jb22CCQC3
+N/5pKFRTjDANBgkqhkiG9w0BAQUFAAOBgQBHBN06mk++v2fb3DnDiwt0Xqna4oNH
+LNN69VaKLHhQ8J0shO4386E9ejLTutWO5VCdRim3a7WuneYSIncFBY76ddipWmuK
+chitDDRUn/ccx4pkPoiHBGqthMqSbNGVsamAMOAJzzPyGXdur46wpzz6DtWObsQg
+2/Q6evShgNYmtg==
+-----END CERTIFICATE-----
diff --git a/potpourri/rpkigui-flatten-roas.py b/potpourri/rpkigui-flatten-roas.py
new file mode 100644
index 00000000..e21c368b
--- /dev/null
+++ b/potpourri/rpkigui-flatten-roas.py
@@ -0,0 +1,37 @@
+from rpki.gui.script_util import setup
+setup()
+
+from django.db import transaction
+from django.db.models import Count
+from rpki.gui.app.models import ROARequest
+from rpki.irdb.zookeeper import Zookeeper
+
+handles = set()
+
+
+@transaction.commit_on_success
+def flatten():
+ for roa in ROARequest.objects.annotate(num_prefixes=Count('prefixes')).filter(num_prefixes__gt=1):
+ print 'splitting roa for AS%d' % roa.asn
+ for pfx in roa.prefixes.all():
+ # create new roa objects for each prefix
+ newroa = ROARequest.objects.create(
+ issuer=roa.issuer,
+ asn=roa.asn)
+ newroa.prefixes.create(
+ version=pfx.version,
+ prefix=pfx.prefix,
+ prefixlen=pfx.prefixlen,
+ max_prefixlen=pfx.max_prefixlen
+ )
+ roa.delete()
+ handles.add(roa.issuer.handle)
+
+flatten()
+
+if handles:
+ # poke rpkid to run the cron job for each handle that had a roa change
+ z = Zookeeper()
+ for h in handles:
+ z.reset_identity(h)
+ z.run_rpkid_now()
diff --git a/potpourri/rpkigui-reset-demo.py b/potpourri/rpkigui-reset-demo.py
new file mode 100644
index 00000000..0a3a1537
--- /dev/null
+++ b/potpourri/rpkigui-reset-demo.py
@@ -0,0 +1,48 @@
+# Copyright (C) 2012, 2013, 2014 SPARTA, Inc. a Parsons Company
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+This script is used to reset all of the labuser* accounts on demo.rpki.net back
+to a state suitable for a new workshop. It removes all ROAs and Ghostbuster
+issued by the labuser accounts.
+
+"""
+
+__version__ = '$Id$'
+
+from rpki.gui.script_util import setup
+setup()
+
+import sys
+
+from rpki.gui.app.models import Conf
+from rpki.irdb.models import ROARequest, GhostbusterRequest
+from rpki.gui.app.glue import list_received_resources
+
+for n in xrange(1, 33):
+ username = 'labuser%02d' % n
+ print 'removing objects for ' + username
+ for cls in (ROARequest, GhostbusterRequest):
+ cls.objects.filter(issuer__handle=username).delete()
+ conf = Conf.objects.get(handle=username)
+ conf.clear_alerts()
+ print '... updating resource certificate cache'
+ list_received_resources(sys.stdout, conf)
+
+ # Remove delegated resources (see https://trac.rpki.net/ticket/544)
+ # Note that we do not remove the parent-child relationship, just the
+ # resources.
+ for child in conf.children():
+ child.asns.delete()
+ child.address_ranges.delete()
diff --git a/potpourri/rpkigui-sync-users.py b/potpourri/rpkigui-sync-users.py
new file mode 100644
index 00000000..9c636e95
--- /dev/null
+++ b/potpourri/rpkigui-sync-users.py
@@ -0,0 +1,32 @@
+# Copyright (C) 2013 SPARTA, Inc. a Parsons Company
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+__version__ = '$Id$'
+
+"""
+Ensure that a web login exists for each labuser* resource holder.
+"""
+from rpki.gui.script_util import setup
+setup()
+
+from django.contrib.auth.models import User
+from rpki.gui.app.models import Conf, ConfACL
+
+# mysql> select * from irdb_resourceholderca left outer join auth_user on irdb_resourceholderca.handle = auth_user.username where username=NULL;
+
+for conf in Conf.objects.filter(handle__startswith='labuser'):
+ if not User.objects.filter(username=conf.handle).exists():
+ print 'creating matching user for ' + conf.handle
+ user = User.objects.create_user(conf.handle, password='fnord')
+ ConfACL.objects.create(conf=conf, user=user)
diff --git a/potpourri/rrd-rcynic-history.py b/potpourri/rrd-rcynic-history.py
new file mode 100644
index 00000000..8a0d50a8
--- /dev/null
+++ b/potpourri/rrd-rcynic-history.py
@@ -0,0 +1,201 @@
+# $Id$
+#
+# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Parse traffic data out of rcynic XML output, whack it a bit, and stuff
+it into one or more RRDs (Round Robin Databases -- see rrdtool).
+
+Haven't decided yet whether to draw the resulting pictures here or
+elsewhere.
+
+This is an initial adaptation of analyze-rcynic-history.py, which uses
+gnuplot and a shelve database. It's also my first attempt to do
+anything with rrdtool, so no doubt I'll get half of it wrong.
+"""
+
+import mailbox
+import sys
+import urlparse
+import os
+import time
+
+from xml.etree.cElementTree import (ElementTree as ElementTree,
+ fromstring as ElementTreeFromString)
+
+os.putenv("TZ", "UTC")
+time.tzset()
+
+def parse_utc(s):
+ return int(time.mktime(time.strptime(s, "%Y-%m-%dT%H:%M:%SZ")))
+
+class Host(object):
+ """
+ A host object represents all the data collected for one host for a given session.
+ """
+
+ def __init__(self, hostname, timestamp):
+ self.hostname = hostname
+ self.timestamp = timestamp
+ self.elapsed = 0
+ self.connections = 0
+ self.failures = 0
+ self.uris = set()
+
+ def add_connection(self, elt):
+ self.elapsed += parse_utc(elt.get("finished")) - parse_utc(elt.get("started"))
+ self.connections += 1
+ if elt.get("error") is not None:
+ self.failures += 1
+
+ def add_object_uri(self, u):
+ self.uris.add(u)
+
+ @property
+ def failed(self):
+ return 1 if self.failures > 0 else 0
+
+ @property
+ def objects(self):
+ return len(self.uris)
+
+ field_table = (("timestamp", None, None, None),
+ ("connections", "GAUGE", "Connections", "FF0000"),
+ ("objects", "GAUGE", "Objects", "00FF00"),
+ ("elapsed", "GAUGE", "Fetch Time", "0000FF"),
+ ("failed", "ABSOLUTE", "Failed", "00FFFF"))
+
+ @property
+ def field_values(self):
+ return tuple(str(getattr(self, field[0])) for field in self.field_table)
+
+ @classmethod
+ def field_ds_specifiers(cls, heartbeat = 24 * 60 * 60, minimum = 0, maximum = "U"):
+ return ["DS:%s:%s:%s:%s:%s" % (field[0], field[1], heartbeat, minimum, maximum)
+ for field in cls.field_table if field[1] is not None]
+
+ @classmethod
+ def field_graph_specifiers(cls, hostname):
+ result = []
+ for field in cls.field_table:
+ if field[1] is not None:
+ result.append("DEF:%s=%s.rrd:%s:AVERAGE" % (field[0], hostname, field[0]))
+ result.append("'LINE1:%s#%s:%s'" % (field[0], field[3], field[2]))
+ return result
+
+ def save(self, rrdtable):
+ rrdtable.add(self.hostname, self.field_values)
+
+class Session(dict):
+ """
+ A session corresponds to one XML file. This is a dictionary of Host
+ objects, keyed by hostname.
+ """
+
+ def __init__(self, timestamp):
+ dict.__init__(self)
+ self.timestamp = timestamp
+
+ @property
+ def hostnames(self):
+ return set(self.iterkeys())
+
+ def add_connection(self, elt):
+ hostname = urlparse.urlparse(elt.text.strip()).hostname
+ if hostname not in self:
+ self[hostname] = Host(hostname, self.timestamp)
+ self[hostname].add_connection(elt)
+
+ def add_object_uri(self, u):
+ h = urlparse.urlparse(u).hostname
+ if h and h in self:
+ self[h].add_object_uri(u)
+
+ def save(self, rrdtable):
+ for h in self.itervalues():
+ h.save(rrdtable)
+
+class RRDTable(dict):
+ """
+ Final data we're going to be sending to rrdtool. We need to buffer
+ it until we're done because we have to sort it. Might be easier
+ just to sort the maildir, then again it might be easier to get rid
+ of the maildir too once we're dealing with current data. We'll see.
+ """
+
+ def __init__(self, rrdtool = sys.stdout):
+ dict.__init__(self)
+ self.rrdtool = rrdtool
+
+ def add(self, hostname, data):
+ if hostname not in self:
+ self[hostname] = []
+ self[hostname].append(data)
+
+ def sort(self):
+ for data in self.itervalues():
+ data.sort()
+
+ @property
+ def oldest(self):
+ return min(min(datum[0] for datum in data) for data in self.itervalues())
+
+ rras = tuple("RRA:AVERAGE:0.5:%s:9600" % steps for steps in (1, 4, 24))
+
+ def create(self):
+ start = self.oldest
+ ds_list = Host.field_ds_specifiers()
+ ds_list.extend(self.rras)
+ for hostname in self:
+ if not os.path.exists("%s.rrd" % hostname):
+ self.rrdtool("create %s.rrd --start %s --step 3600 %s\n" % (hostname, start, " ".join(ds_list)))
+
+ def update(self):
+ for hostname, data in self.iteritems():
+ for datum in data:
+ self.rrdtool("update %s.rrd %s\n" % (hostname, ":".join(str(d) for d in datum)))
+
+ def graph(self):
+ for hostname in self:
+ self.rrdtool("graph %s.png --start -90d %s\n" % (hostname, " ".join(Host.field_graph_specifiers(hostname))))
+
+mb = mailbox.Maildir("/u/sra/rpki/rcynic-xml", factory = None, create = False)
+
+rrdtable = RRDTable()
+
+for i, key in enumerate(mb.iterkeys(), 1):
+ sys.stderr.write("\r%s %d/%d..." % ("|\\-/"[i & 3], i, len(mb)))
+
+ assert not mb[key].is_multipart()
+ input = ElementTreeFromString(mb[key].get_payload())
+ date = input.get("date")
+ sys.stderr.write("%s..." % date)
+ session = Session(parse_utc(date))
+ for elt in input.findall("rsync_history"):
+ session.add_connection(elt)
+ for elt in input.findall("validation_status"):
+ if elt.get("generation") == "current":
+ session.add_object_uri(elt.text.strip())
+ session.save(rrdtable)
+
+ # XXX
+ #if i > 4: break
+
+sys.stderr.write("\n")
+
+rrdtable.create()
+rrdtable.sort()
+rrdtable.update()
+rrdtable.graph()
diff --git a/potpourri/setup-rootd.sh b/potpourri/setup-rootd.sh
new file mode 100644
index 00000000..41a271b8
--- /dev/null
+++ b/potpourri/setup-rootd.sh
@@ -0,0 +1,36 @@
+#!/bin/sh -
+#
+# $Id$
+#
+# Copyright (C) 2010 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+# Setting up rootd requires cross-certifying rpkid's resource-holding
+# BPKI trust anchor under the BPKI trust anchor that rootd uses. This
+# script handles that, albeit in a very ugly way.
+#
+# Filenames are wired in; you might need to change these if you've
+# done something more complicated.
+
+export RANDFILE=.OpenSSL.whines.unless.I.set.this
+export BPKI_DIRECTORY=`pwd`/bpki/servers
+
+openssl=../openssl/openssl/apps/openssl
+
+$openssl ca -notext -batch -config rpki.conf \
+ -ss_cert bpki/resources/ca.cer \
+ -out $BPKI_DIRECTORY/child.cer \
+ -extensions ca_x509_ext_xcert0
+
+$openssl x509 -noout -text -in $BPKI_DIRECTORY/child.cer
diff --git a/potpourri/show-cms-ee-certs.sh b/potpourri/show-cms-ee-certs.sh
new file mode 100644
index 00000000..4f5168c7
--- /dev/null
+++ b/potpourri/show-cms-ee-certs.sh
@@ -0,0 +1,25 @@
+#!/bin/sh -
+#
+# $Id$
+#
+# Copyright (C) 2010 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+openssl=../openssl/openssl/apps/openssl
+
+for i in "$@"
+do
+ $openssl cms -verify -noverify -out /dev/null -inform DER -certsout /dev/stdout -in $i |
+ $openssl x509 -noout -text -certopt no_sigdump,no_pubkey
+done
diff --git a/potpourri/show-tracking-data.py b/potpourri/show-tracking-data.py
new file mode 100644
index 00000000..07e0a144
--- /dev/null
+++ b/potpourri/show-tracking-data.py
@@ -0,0 +1,39 @@
+# $Id$
+#
+# Copyright (C) 2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Show tracking data for a bunch of objects retrieved by rcynic.
+
+This script takes one required argument, which is the name of a
+directory tree containing the validated output of an rcynic run. If
+you follow the default naming scheme this will be
+/some/where/rcynic-data/authenticated.
+"""
+
+import os
+import sys
+import rpki.x509
+import rpki.sundial
+
+rcynic_dir = sys.argv[1]
+
+for root, dirs, files in os.walk(rcynic_dir):
+ for f in files:
+ path = os.path.join(root, f)
+ date = rpki.sundial.datetime.utcfromtimestamp(os.stat(path).st_mtime)
+ uri = "rsync://" + path[len(rcynic_dir):].lstrip("/")
+ obj = rpki.x509.uri_dispatch(uri)(DER_file = path)
+ print date, obj.tracking_data(uri)
diff --git a/potpourri/signed-object-dates.py b/potpourri/signed-object-dates.py
new file mode 100644
index 00000000..fefd9448
--- /dev/null
+++ b/potpourri/signed-object-dates.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# $Id$
+
+# Extract notBefore and notAfter values from an RPKI signed object;
+# if the object is a manifest, also extract thisUpdate and nextUpdate.
+
+# Copyright (C) 2013 Dragon Research Labs ("DRL")
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+import sys
+import rpki.POW
+
+extract_flags = (rpki.POW.CMS_NOCRL |
+ rpki.POW.CMS_NO_SIGNER_CERT_VERIFY |
+ rpki.POW.CMS_NO_ATTR_VERIFY |
+ rpki.POW.CMS_NO_CONTENT_VERIFY)
+
+for fn in sys.argv[1:]:
+ cls = rpki.POW.Manifest if fn.endswith(".mft") else rpki.POW.CMS
+ cms = cls.derReadFile(fn)
+ cer = cms.certs()[0]
+ print fn
+ print " notBefore: ", cer.getNotBefore()
+ if fn.endswith(".mft"):
+ cms.verify(rpki.POW.X509Store(), None, extract_flags)
+ print " thisUpdate:", cms.getThisUpdate()
+ print " nextUpdate:", cms.getNextUpdate()
+ print " notAfter: ", cer.getNotAfter()
+ print
diff --git a/potpourri/splitbase64.xsl b/potpourri/splitbase64.xsl
new file mode 100644
index 00000000..2b2d2fb1
--- /dev/null
+++ b/potpourri/splitbase64.xsl
@@ -0,0 +1,66 @@
+<!-- -*- SGML -*-
+ - $Id$
+ -
+ - Copyright (C) 2009 Internet Systems Consortium ("ISC")
+ -
+ - Permission to use, copy, modify, and distribute this software for any
+ - purpose with or without fee is hereby granted, provided that the above
+ - copyright notice and this permission notice appear in all copies.
+ -
+ - THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+ - REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+ - AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+ - INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+ - LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+ - OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+ - PERFORMANCE OF THIS SOFTWARE.
+ -->
+
+<!--
+ - Split Base64 XML text elements into reasonable length chunks, to
+ - make the result more readable, allow halfway-sane comparisons of
+ - XML using diff, etc. Makes no attempt to distinguish Base64 from
+ - other text, so not suitable for use on XML with text elements that
+ - are -not- Base64. Piping output of this transform into xmlindent
+ - produces something halfway readable. YMMV.
+ -->
+
+<xsl:transform xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
+
+ <xsl:output method="xml"/>
+
+ <xsl:param name="width" select="64"/>
+
+ <xsl:template match="text()">
+ <xsl:text>&#10;</xsl:text>
+ <xsl:call-template name="wrap">
+ <xsl:with-param name="input" select="translate(normalize-space(), ' ', '')"/>
+ </xsl:call-template>
+ </xsl:template>
+
+ <xsl:template match="node()|@*">
+ <xsl:copy>
+ <xsl:copy-of select="@*"/>
+ <xsl:apply-templates/>
+ </xsl:copy>
+ </xsl:template>
+
+ <xsl:template name="wrap">
+ <xsl:param name="input"/>
+ <xsl:text> </xsl:text>
+ <xsl:choose>
+ <xsl:when test="string-length($input) > $width">
+ <xsl:value-of select="substring($input, 1, $width)"/>
+ <xsl:text>&#10;</xsl:text>
+ <xsl:call-template name="wrap">
+ <xsl:with-param name="input" select="substring($input, $width+1)"/>
+ </xsl:call-template>
+ </xsl:when>
+ <xsl:otherwise>
+ <xsl:value-of select="$input"/>
+ <xsl:text>&#10;</xsl:text>
+ </xsl:otherwise>
+ </xsl:choose>
+ </xsl:template>
+
+</xsl:transform>
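
The wrap template above is plain fixed-width chunking of a whitespace-stripped string; the transform can be applied with any XSLT 1.0 processor (xsltproc's --param option can override width). As a rough sketch of the same chunking idea in Python, mirroring the scripts elsewhere in this directory (the width and sample data are arbitrary, not part of the transform):

    import textwrap

    def wrap_base64(text, width = 64):
        # Drop embedded whitespace, then emit fixed-width chunks,
        # like the XSLT "wrap" template above.
        text = "".join(text.split())
        return "\n".join(textwrap.wrap(text, width))

    print wrap_base64("QUJD" * 40)      # prints 64-character lines
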
diff --git a/potpourri/testbed-rootcert.py b/potpourri/testbed-rootcert.py
new file mode 100644
index 00000000..0716be2f
--- /dev/null
+++ b/potpourri/testbed-rootcert.py
@@ -0,0 +1,66 @@
+# $Id$
+#
+# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Generate config for a test RPKI root certificate for resources
+specified in asns.csv and prefixes.csv.
+
+This script is separate from arin-to-csv.py so that we can convert on
+the fly rather than having to pull the entire database into memory.
+"""
+
+import sys
+from rpki.csv_utils import csv_reader
+
+if len(sys.argv) not in (2, 4):
+ sys.exit("Usage: %s holder [asns.csv prefixes.csv]" % sys.argv[0])
+
+print '''\
+[req]
+default_bits = 2048
+default_md = sha256
+distinguished_name = req_dn
+prompt = no
+encrypt_key = no
+
+[req_dn]
+CN = Pseudo-%(HOLDER)s testbed root RPKI certificate
+
+[x509v3_extensions]
+basicConstraints = critical,CA:true
+subjectKeyIdentifier = hash
+keyUsage = critical,keyCertSign,cRLSign
+subjectInfoAccess = 1.3.6.1.5.5.7.48.5;URI:rsync://%(holder)s.rpki.net/rpki/%(holder)s/,1.3.6.1.5.5.7.48.10;URI:rsync://%(holder)s.rpki.net/rpki/%(holder)s/root.mft
+certificatePolicies = critical,1.3.6.1.5.5.7.14.2
+sbgp-autonomousSysNum = critical,@rfc3779_asns
+sbgp-ipAddrBlock = critical,@rfc3779_addrs
+
+[rfc3779_asns]
+''' % { "holder" : sys.argv[1].lower(),
+ "HOLDER" : sys.argv[1].upper() }
+
+for i, asn in enumerate(asn for handle, asn in csv_reader(sys.argv[2] if len(sys.argv) > 2 else "asns.csv", columns = 2)):
+ print "AS.%d = %s" % (i, asn)
+
+print '''\
+
+[rfc3779_addrs]
+
+'''
+
+for i, prefix in enumerate(prefix for handle, prefix in csv_reader(sys.argv[3] if len(sys.argv) > 2 else "prefixes.csv", columns = 2)):
+ v = 6 if ":" in prefix else 4
+ print "IPv%d.%d = %s" % (v, i, prefix)
diff --git a/potpourri/translate-handles.py b/potpourri/translate-handles.py
new file mode 100644
index 00000000..49848277
--- /dev/null
+++ b/potpourri/translate-handles.py
@@ -0,0 +1,49 @@
+# $Id$
+#
+# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Translate handles from the ones provided in a database dump into the
+ones we use in our testbed. This has been broken out into a separate
+program for two reasons:
+
+- Conversion of some of the RIR data is a very slow process, and it's
+ both annoying and unnecessary to run it every time we add a new
+ participant to the testbed.
+
+- This handle translation business now has fingers into half a dozen
+ scripts, so it needs refactoring in any case, either as a common
+ library function or as a separate script.
+
+This program takes a list of .CSV files on its command line, and
+rewrites them as needed after performing the translation.
+"""
+
+import os
+import sys
+from rpki.csv_utils import csv_reader, csv_writer
+
+translations = dict((src, dst) for src, dst in csv_reader("translations.csv", columns = 2))
+
+for filename in sys.argv[1:]:
+
+ f = csv_writer(filename)
+
+ for cols in csv_reader(filename):
+ if cols[0] in translations:
+ cols[0] = translations[cols[0]]
+ f.writerow(cols)
+
+ f.close()
diff --git a/potpourri/upgrade-add-ghostbusters.py b/potpourri/upgrade-add-ghostbusters.py
new file mode 100644
index 00000000..a8c8a92b
--- /dev/null
+++ b/potpourri/upgrade-add-ghostbusters.py
@@ -0,0 +1,73 @@
+# $Id$
+#
+# Copyright (C) 2009--2011 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Add SQL tables needed for Ghostbusters support.
+Most of the code here was lifted from rpki-sql-setup.py.
+"""
+
+import getopt, sys, rpki.config, warnings
+
+from rpki.mysql_import import MySQLdb
+
+def fix(name, *statements):
+ db = MySQLdb.connect(db = cfg.get("sql-database", section = name),
+ user = cfg.get("sql-username", section = name),
+ passwd = cfg.get("sql-password", section = name))
+ cur = db.cursor()
+ for statement in statements:
+ cur.execute(statement)
+ db.commit()
+ db.close()
+
+cfg_file = None
+
+opts, argv = getopt.getopt(sys.argv[1:], "c:h?", ["config=", "help"])
+for o, a in opts:
+ if o in ("-h", "--help", "-?"):
+ print __doc__
+ sys.exit(0)
+ if o in ("-c", "--config"):
+ cfg_file = a
+
+cfg = rpki.config.parser(cfg_file, "myrpki")
+
+fix("irdbd", """
+ CREATE TABLE ghostbuster_request (
+ ghostbuster_request_id SERIAL NOT NULL,
+ self_handle VARCHAR(40) NOT NULL,
+ parent_handle VARCHAR(40),
+ vcard LONGBLOB NOT NULL,
+ PRIMARY KEY (ghostbuster_request_id)
+ ) ENGINE=InnoDB;
+""")
+
+fix("rpkid", """
+ CREATE TABLE ghostbuster (
+ ghostbuster_id SERIAL NOT NULL,
+ vcard LONGBLOB NOT NULL,
+ cert LONGBLOB NOT NULL,
+ ghostbuster LONGBLOB NOT NULL,
+ published DATETIME,
+ self_id BIGINT UNSIGNED NOT NULL,
+ ca_detail_id BIGINT UNSIGNED NOT NULL,
+ PRIMARY KEY (ghostbuster_id),
+ CONSTRAINT ghostbuster_self_id
+ FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
+ CONSTRAINT ghostbuster_ca_detail_id
+ FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE
+ ) ENGINE=InnoDB;
+""")
diff --git a/potpourri/verify-bpki.sh b/potpourri/verify-bpki.sh
new file mode 100755
index 00000000..0e36d796
--- /dev/null
+++ b/potpourri/verify-bpki.sh
@@ -0,0 +1,43 @@
+#!/bin/sh -
+# $Id$
+#
+# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+# Tests of generated BPKI certificates. Kind of cheesy, but does test
+# the basic stuff.
+
+exec 2>&1
+
+for bpki in bpki/*
+do
+ crls=$(find $bpki -name '*.crl')
+
+ # Check that CRLs verify properly
+ for crl in $crls
+ do
+ echo -n "$crl: "
+ openssl crl -CAfile $bpki/ca.cer -noout -in $crl
+ done
+
+ # Check that issued certificates verify properly
+ cat $bpki/ca.cer $crls | openssl verify -crl_check -CAfile /dev/stdin $(find $bpki -name '*.cer' ! -name 'ca.cer' ! -name '*.cacert.cer')
+
+done
+
+# Check that cross-certified BSC certificates verify properly
+if test -d bpki/servers
+then
+ cat bpki/servers/xcert.*.cer | openssl verify -verbose -CAfile bpki/servers/ca.cer -untrusted /dev/stdin bpki/resources/bsc.*.cer
+fi
diff --git a/potpourri/whack-ripe-asns.py b/potpourri/whack-ripe-asns.py
new file mode 100644
index 00000000..9c702271
--- /dev/null
+++ b/potpourri/whack-ripe-asns.py
@@ -0,0 +1,83 @@
+# $Id$
+#
+# Copyright (C) 2010 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Fix problems in asns.csv generated from RIPE's database.
+
+RIPE's database contains inconsistencies, overlaps, and format errors
+that make it impossible to feed the output of ripe-to-csv.awk directly
+into testbed-rootcert.py without OpenSSL rejecting the resulting
+root.conf. This script takes a brute force approach to fixing this:
+it converts all ASNs to range form, runs the resulting file through
+the unix sort program to put the data into canonical order, then reads
+it back, merging overlaps, and writing the result in a form acceptable
+to testbed-rootcert.py.
+
+Since we're doing all this anyway, the script also merges adjacent
+blocks.
+
+Ordinarily, it would be dangerous to have the same program act as both
+the source and sink of a pipeline, particularly for such a large data
+set, as the chance of deadlock would approach 100%, but in this case
+we know that the sort program must consume and buffer (somehow) all of
+its input before writing a single line of output, so a single script
+can safely act as a filter both before and after sort.
+"""
+
+import sys, subprocess
+
+sorter = subprocess.Popen(("sort", "-T.", "-n"),
+ stdin = subprocess.PIPE,
+ stdout = subprocess.PIPE)
+
+for line in sys.stdin:
+ handle, asn = line.split()
+
+ if "-" in asn:
+ range_min, range_max = asn.split("-")
+ else:
+ range_min, range_max = asn, asn
+
+ sorter.stdin.write("%d %d\n" % (long(range_min), long(range_max)))
+
+sorter.stdin.close()
+
+prev_min = None
+prev_max = None
+
+def show():
+ if prev_min is not None and prev_max is not None:
+ sys.stdout.write("x\t%s-%s\n" % (prev_min, prev_max))
+
+for line in sorter.stdout:
+ this_min, this_max = line.split()
+ this_min = long(this_min)
+ this_max = long(this_max)
+
+ if prev_min is not None and prev_max is not None and prev_max + 1 >= this_min:
+ prev_min = min(prev_min, this_min)
+ prev_max = max(prev_max, this_max)
+
+ else:
+ show()
+ prev_min = this_min
+ prev_max = this_max
+
+show()
+
+sorter.stdout.close()
+
+sys.exit(sorter.wait())
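
The merge step is ordinary interval coalescing: after numeric sorting, a range is folded into the previous one whenever it overlaps or is adjacent (prev_max + 1 >= this_min). A self-contained sketch of that rule on a small in-memory list (sample ranges are arbitrary):

    ranges = [(1, 5), (6, 10), (12, 15), (14, 20)]   # already sorted by range min

    merged = []
    for lo, hi in ranges:
        if merged and merged[-1][1] + 1 >= lo:        # overlap or adjacency
            merged[-1] = (merged[-1][0], max(merged[-1][1], hi))
        else:
            merged.append((lo, hi))

    print merged                                       # [(1, 10), (12, 20)]
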
diff --git a/potpourri/whack-ripe-prefixes.py b/potpourri/whack-ripe-prefixes.py
new file mode 100644
index 00000000..52ea3f18
--- /dev/null
+++ b/potpourri/whack-ripe-prefixes.py
@@ -0,0 +1,101 @@
+# $Id$
+#
+# Copyright (C) 2010 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Fix problems in prefixes.csv generated from RIPE's database.
+
+RIPE's database contains inconsistencies, overlaps, and format errors
+that make it impossible to feed the output of ripe-to-csv.awk directly
+into testbed-rootcert.py without OpenSSL rejecting the resulting
+root.conf. This script takes a brute force approach to fixing this:
+it converts all prefixes and address ranges into pairs of unsigned
+decimal integers representing range min and range max, runs the
+resulting 3+ million entry file through the unix sort program to put
+the data into canonical order, then reads it back, merging overlaps
+and converting everything back to ranges of IP addresses, and writing
+the result in a form acceptable to testbed-rootcert.py.
+
+Since we're doing all this anyway, the script also merges adjacent
+address blocks, not because OpenSSL can't handle them (it can) but
+because doing so cuts out a lot of unnecessary I/O.
+
+Ordinarily, it would be dangerous to have the same program act as both
+the source and sink of a pipeline, particularly for such a large data
+set, as the chance of deadlock would approach 100%, but in this case
+we know that the sort program must consume and buffer (somehow) all of
+its input before writing a single line of output, so a single script
+can safely act as a filter both before and after sort.
+"""
+
+import sys
+import subprocess
+import rpki.ipaddrs
+
+sorter = subprocess.Popen(("sort", "-T.", "-n"),
+ stdin = subprocess.PIPE,
+ stdout = subprocess.PIPE)
+
+for line in sys.stdin:
+ handle, prefix = line.split()
+
+ if "-" in prefix:
+ range_min, range_max = prefix.split("-")
+ range_min = rpki.ipaddrs.parse(range_min)
+ range_max = rpki.ipaddrs.parse(range_max)
+
+ else:
+ address, length = prefix.split("/")
+ address = rpki.ipaddrs.parse(address)
+ mask = (1L << (address.bits - int(length))) - 1
+ range_min = address & ~mask
+ range_max = address | mask
+
+ sorter.stdin.write("%d %d\n" % (long(range_min), long(range_max)))
+
+sorter.stdin.close()
+
+prev_min = None
+prev_max = None
+
+def address(number):
+ if number > 0xffffffff:
+ return rpki.ipaddrs.v6addr(number)
+ else:
+ return rpki.ipaddrs.v4addr(number)
+
+def show():
+ if prev_min is not None and prev_max is not None:
+ sys.stdout.write("x\t%s-%s\n" % (address(prev_min), address(prev_max)))
+
+for line in sorter.stdout:
+ this_min, this_max = line.split()
+ this_min = long(this_min)
+ this_max = long(this_max)
+
+ if prev_min is not None and prev_max is not None and prev_max + 1 >= this_min:
+ prev_min = min(prev_min, this_min)
+ prev_max = max(prev_max, this_max)
+
+ else:
+ show()
+ prev_min = this_min
+ prev_max = this_max
+
+show()
+
+sorter.stdout.close()
+
+sys.exit(sorter.wait())
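
The only step that differs from the ASN variant above is the prefix-to-range conversion: a CIDR prefix a/len becomes the inclusive pair (a & ~mask, a | mask), where mask has (bits - len) low bits set. A worked sketch with plain integers for the documentation prefix 192.0.2.0/24 in a 32-bit address space:

    address = 0xC0000200                  # 192.0.2.0 as a 32-bit integer
    length  = 24
    bits    = 32

    mask = (1 << (bits - length)) - 1     # 0x000000FF
    range_min = address & ~mask           # 0xC0000200 -> 192.0.2.0
    range_max = address | mask            # 0xC00002FF -> 192.0.2.255

    print hex(range_min), hex(range_max)
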
diff --git a/potpourri/x509-dot.py b/potpourri/x509-dot.py
new file mode 100644
index 00000000..42e1543a
--- /dev/null
+++ b/potpourri/x509-dot.py
@@ -0,0 +1,170 @@
+# $Id$
+
+"""
+Generate .dot description of a certificate tree.
+
+Copyright (C) 2009-2012 Internet Systems Consortium ("ISC")
+
+Permission to use, copy, modify, and distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+
+Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN")
+
+Permission to use, copy, modify, and distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+"""
+
+import rpki.POW, sys, glob, os
+
+class x509(object):
+
+ ski = None
+ aki = None
+
+ show_file = False
+ show_ski = False
+ show_aki = False
+ show_issuer = True
+ show_subject = True
+
+ cn_only = True
+
+ subjects = {}
+
+ def __init__(self, filename):
+
+ while filename.startswith("./"):
+ filename = filename[2:]
+
+ self.filename = filename
+
+ f = open(filename, "rb")
+ text = f.read()
+ f.close()
+
+ if "-----BEGIN" in text:
+ self.pow = rpki.POW.X509.pemRead(text)
+ else:
+ self.pow = rpki.POW.X509.derRead(text)
+
+
+ try:
+ self.ski = ":".join(["%02X" % ord(i) for i in self.pow.getSKI()])
+ except:
+ pass
+
+ try:
+ self.aki = ":".join(["%02X" % ord(i) for i in self.pow.getAKI()])
+ except:
+ pass
+
+ self.subject = self.canonize(self.pow.getSubject())
+ self.issuer = self.canonize(self.pow.getIssuer())
+
+ if self.subject in self.subjects:
+ self.subjects[self.subject].append(self)
+ else:
+ self.subjects[self.subject] = [self]
+
+ def canonize(self, name):
+
+ # Probably should just use rpki.x509.X501DN class here.
+
+ try:
+ if self.cn_only and name[0][0][0] == "2.5.4.3":
+ return name[0][0][1]
+ except:
+ pass
+
+ return name
+
+ def set_node(self, node):
+
+ self.node = node
+
+ def dot(self):
+
+ label = []
+
+ if self.show_issuer:
+ label.append(("Issuer", self.issuer))
+
+ if self.show_subject:
+ label.append(("Subject", self.subject))
+
+ if self.show_file:
+ label.append(("File", self.filename))
+
+ if self.show_aki:
+ label.append(("AKI", self.aki))
+
+ if self.show_ski:
+ label.append(("SKI", self.ski))
+
+ print "#", repr(label)
+
+ if len(label) > 1:
+ print '%s [shape = record, label = "{%s}"];' % (self.node, "|".join("{%s|%s}" % (x, y) for x, y in label if y is not None))
+ else:
+ print '%s [label = "%s"];' % (self.node, label[0][1])
+
+ for issuer in self.subjects.get(self.issuer, ()):
+
+ if issuer is self:
+ print "# Issuer is self"
+ issuer = None
+
+ if issuer is not None and self.aki is not None and self.ski is not None and self.aki == self.ski:
+ print "# Self-signed"
+ issuer = None
+
+ if issuer is not None and self.aki is not None and issuer.ski is not None and self.aki != issuer.ski:
+ print "# AKI does not match issuer SKI"
+ issuer = None
+
+ if issuer is not None:
+ print "%s -> %s;" % (issuer.node, self.node)
+
+ print
+
+certs = []
+
+for topdir in sys.argv[1:] or ["."]:
+ for dirpath, dirnames, filenames in os.walk(topdir):
+ certs += [x509(dirpath + "/" + filename) for filename in filenames if filename.endswith(".cer")]
+
+for i, cert in enumerate(certs):
+ cert.set_node("cert_%d" % i)
+
+print """\
+digraph certificates {
+
+rotate = 90;
+#size = "11,8.5";
+splines = true;
+ratio = fill;
+
+"""
+
+for cert in certs:
+ cert.dot()
+
+print "}"