Diffstat (limited to 'potpourri')
40 files changed, 2030 insertions, 1577 deletions
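Most hunks below are re-indentation and trailing-whitespace cleanup (note the paired `-#`/`+#` comment lines and otherwise identical removed/added lines), with only a few substantive edits such as object-dates.py switching from `cms.verify(...)` to `cms.extractWithoutVerifying()`, the deletion of generate-root-certificate, and the new django-legacy-database files. A rough helper for telling the two kinds of change apart, not part of this repository — the "old"/"new" checkout directories and the sample filename are assumptions for illustration:

import os
import sys

def significant_lines(path):
    # Drop leading/trailing whitespace and blank lines so that pure
    # re-indentation and trailing-whitespace cleanup compare as equal.
    with open(path) as f:
        return [line.strip() for line in f if line.strip()]

for name in sys.argv[1:] or ["potpourri/analyze-rcynic-history.py"]:
    old = significant_lines(os.path.join("old", name))
    new = significant_lines(os.path.join("new", name))
    print("%s: %s" % (name, "whitespace-only" if old == new else "substantive changes"))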
diff --git a/potpourri/analyze-rcynic-history.py b/potpourri/analyze-rcynic-history.py index 648538cc..c0836ab2 100644 --- a/potpourri/analyze-rcynic-history.py +++ b/potpourri/analyze-rcynic-history.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -35,201 +35,201 @@ from xml.etree.cElementTree import (ElementTree as ElementTree, fromstring as ElementTreeFromString) def parse_utc(s): - return datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ") + return datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ") class Rsync_History(object): - """ - An Rsync_History object represents one rsync connection. - """ + """ + An Rsync_History object represents one rsync connection. + """ - def __init__(self, elt): - self.error = elt.get("error") - self.uri = elt.text.strip() - self.hostname = urlparse.urlparse(self.uri).hostname or None - self.elapsed = parse_utc(elt.get("finished")) - parse_utc(elt.get("started")) + def __init__(self, elt): + self.error = elt.get("error") + self.uri = elt.text.strip() + self.hostname = urlparse.urlparse(self.uri).hostname or None + self.elapsed = parse_utc(elt.get("finished")) - parse_utc(elt.get("started")) class Host(object): - """ - A host object represents all the data collected for one host. Note - that it (usually) contains a list of all the sessions in which this - host appears. 
- """ - - def __init__(self, hostname, session_id): - self.hostname = hostname - self.session_id = session_id - self.elapsed = datetime.timedelta(0) - self.connection_count = 0 - self.dead_connections = 0 - self.uris = set() - self.total_connection_time = datetime.timedelta(0) - - def add_rsync_history(self, h): - self.connection_count += 1 - self.elapsed += h.elapsed - self.dead_connections += int(h.error is not None) - self.total_connection_time += h.elapsed - - def add_uri(self, u): - self.uris.add(u) - - def finalize(self): - self.object_count = len(self.uris) - del self.uris - - @property - def failed(self): - return 1 if self.dead_connections else 0 - - @property - def seconds_per_object(self): - if self.failed: - return None - else: - return float(self.elapsed.days * 24 * 60 * 60 + - self.elapsed.seconds + - self.elapsed.microseconds / 10**6) / float(self.object_count) - - @property - def objects_per_connection(self): - if self.failed: - return None - else: - return float(self.object_count) / float(self.connection_count) - - @property - def average_connection_time(self): - return float(self.total_connection_time.days * 24 * 60 * 60 + - self.total_connection_time.seconds + - self.total_connection_time.microseconds / 10**6) / float(self.connection_count) - - class Format(object): - - def __init__(self, attr, title, fmt, ylabel = ""): - self.attr = attr - self.title = title - self.width = len(title) - int("%" in fmt) - self.fmt = "%%%d%s" % (self.width, fmt) - self.oops = "*" * self.width - self.ylabel = ylabel - - def __call__(self, obj): - try: - value = getattr(obj, self.attr) - return None if value is None else self.fmt % value - except ZeroDivisionError: - return self.oops - - format = (Format("connection_count", "Connections", "d", "Connections To Repository (Per Session)"), - Format("object_count", "Objects", "d", "Objects In Repository (Distinct URIs Per Session)"), - Format("objects_per_connection", "Objects/Connection", ".3f", "Objects In Repository / Connections To Repository"), - Format("seconds_per_object", "Seconds/Object", ".3f", "Seconds To Transfer / Object (Average Per Session)"), - Format("failure_rate_running", "Failure Rate", ".3f%%", "Sessions With Failed Connections Within Last %d Hours" % window_hours), - Format("average_connection_time", "Average Connection", ".3f", "Seconds / Connection (Average Per Session)"), - Format("hostname", "Hostname", "s")) - - format_dict = dict((fmt.attr, fmt) for fmt in format) - - def format_field(self, name): - result = self.format_dict[name](self) - return None if result is None else result.strip() + """ + A host object represents all the data collected for one host. Note + that it (usually) contains a list of all the sessions in which this + host appears. 
+ """ + + def __init__(self, hostname, session_id): + self.hostname = hostname + self.session_id = session_id + self.elapsed = datetime.timedelta(0) + self.connection_count = 0 + self.dead_connections = 0 + self.uris = set() + self.total_connection_time = datetime.timedelta(0) + + def add_rsync_history(self, h): + self.connection_count += 1 + self.elapsed += h.elapsed + self.dead_connections += int(h.error is not None) + self.total_connection_time += h.elapsed + + def add_uri(self, u): + self.uris.add(u) + + def finalize(self): + self.object_count = len(self.uris) + del self.uris + + @property + def failed(self): + return 1 if self.dead_connections else 0 + + @property + def seconds_per_object(self): + if self.failed: + return None + else: + return float(self.elapsed.days * 24 * 60 * 60 + + self.elapsed.seconds + + self.elapsed.microseconds / 10**6) / float(self.object_count) + + @property + def objects_per_connection(self): + if self.failed: + return None + else: + return float(self.object_count) / float(self.connection_count) + + @property + def average_connection_time(self): + return float(self.total_connection_time.days * 24 * 60 * 60 + + self.total_connection_time.seconds + + self.total_connection_time.microseconds / 10**6) / float(self.connection_count) + + class Format(object): + + def __init__(self, attr, title, fmt, ylabel = ""): + self.attr = attr + self.title = title + self.width = len(title) - int("%" in fmt) + self.fmt = "%%%d%s" % (self.width, fmt) + self.oops = "*" * self.width + self.ylabel = ylabel + + def __call__(self, obj): + try: + value = getattr(obj, self.attr) + return None if value is None else self.fmt % value + except ZeroDivisionError: + return self.oops + + format = (Format("connection_count", "Connections", "d", "Connections To Repository (Per Session)"), + Format("object_count", "Objects", "d", "Objects In Repository (Distinct URIs Per Session)"), + Format("objects_per_connection", "Objects/Connection", ".3f", "Objects In Repository / Connections To Repository"), + Format("seconds_per_object", "Seconds/Object", ".3f", "Seconds To Transfer / Object (Average Per Session)"), + Format("failure_rate_running", "Failure Rate", ".3f%%", "Sessions With Failed Connections Within Last %d Hours" % window_hours), + Format("average_connection_time", "Average Connection", ".3f", "Seconds / Connection (Average Per Session)"), + Format("hostname", "Hostname", "s")) + + format_dict = dict((fmt.attr, fmt) for fmt in format) + + def format_field(self, name): + result = self.format_dict[name](self) + return None if result is None else result.strip() class Session(dict): - """ - A session corresponds to one XML file. This is a dictionary of Host - objects, keyed by hostname. 
- """ - - def __init__(self, session_id, msg_key): - self.session_id = session_id - self.msg_key = msg_key - self.date = parse_utc(session_id) - self.calculated_failure_history = False - - @property - def hostnames(self): - return set(self.iterkeys()) - - def get_plot_row(self, name, hostnames): - return (self.session_id,) + tuple(self[h].format_field(name) if h in self else "" for h in hostnames) - - def add_rsync_history(self, h): - if h.hostname not in self: - self[h.hostname] = Host(h.hostname, self.session_id) - self[h.hostname].add_rsync_history(h) - - def add_uri(self, u): - h = urlparse.urlparse(u).hostname - if h and h in self: - self[h].add_uri(u) - - def finalize(self): - for h in self.itervalues(): - h.finalize() - - def calculate_failure_history(self, sessions): - start = self.date - datetime.timedelta(hours = window_hours) - sessions = tuple(s for s in sessions if s.date <= self.date and s.date > start) - for hostname, h in self.iteritems(): - i = n = 0 - for s in sessions: - if hostname in s: - i += s[hostname].failed - n += 1 - h.failure_rate_running = float(100 * i) / n - self.calculated_failure_history = True + """ + A session corresponds to one XML file. This is a dictionary of Host + objects, keyed by hostname. + """ + + def __init__(self, session_id, msg_key): + self.session_id = session_id + self.msg_key = msg_key + self.date = parse_utc(session_id) + self.calculated_failure_history = False + + @property + def hostnames(self): + return set(self.iterkeys()) + + def get_plot_row(self, name, hostnames): + return (self.session_id,) + tuple(self[h].format_field(name) if h in self else "" for h in hostnames) + + def add_rsync_history(self, h): + if h.hostname not in self: + self[h.hostname] = Host(h.hostname, self.session_id) + self[h.hostname].add_rsync_history(h) + + def add_uri(self, u): + h = urlparse.urlparse(u).hostname + if h and h in self: + self[h].add_uri(u) + + def finalize(self): + for h in self.itervalues(): + h.finalize() + + def calculate_failure_history(self, sessions): + start = self.date - datetime.timedelta(hours = window_hours) + sessions = tuple(s for s in sessions if s.date <= self.date and s.date > start) + for hostname, h in self.iteritems(): + i = n = 0 + for s in sessions: + if hostname in s: + i += s[hostname].failed + n += 1 + h.failure_rate_running = float(100 * i) / n + self.calculated_failure_history = True def plotter(f, hostnames, field, logscale = False): - plotlines = sorted(session.get_plot_row(field, hostnames) for session in sessions) - title = Host.format_dict[field].title - ylabel = Host.format_dict[field].ylabel - n = len(hostnames) + 1 - assert all(n == len(plotline) for plotline in plotlines) - if "%%" in Host.format_dict[field].fmt: - f.write('set format y "%.0f%%"\n') - else: - f.write('set format y\n') - if logscale: - f.write("set logscale y\n") - else: - f.write("unset logscale y\n") - f.write(""" - set xdata time - set timefmt '%Y-%m-%dT%H:%M:%SZ' - #set format x '%m/%d' - #set format x '%b%d' - #set format x '%Y-%m-%d' - set format x '%Y-%m' - #set title '""" + title + """' - set ylabel '""" + ylabel + """' - plot""" + ",".join(" '-' using 1:2 with linespoints pointinterval 500 title '%s'" % h for h in hostnames) + "\n") - for i in xrange(1, n): - for plotline in plotlines: - if plotline[i] is not None: - f.write("%s %s\n" % (plotline[0], plotline[i].rstrip("%"))) - f.write("e\n") + plotlines = sorted(session.get_plot_row(field, hostnames) for session in sessions) + title = Host.format_dict[field].title + ylabel = 
Host.format_dict[field].ylabel + n = len(hostnames) + 1 + assert all(n == len(plotline) for plotline in plotlines) + if "%%" in Host.format_dict[field].fmt: + f.write('set format y "%.0f%%"\n') + else: + f.write('set format y\n') + if logscale: + f.write("set logscale y\n") + else: + f.write("unset logscale y\n") + f.write(""" + set xdata time + set timefmt '%Y-%m-%dT%H:%M:%SZ' + #set format x '%m/%d' + #set format x '%b%d' + #set format x '%Y-%m-%d' + set format x '%Y-%m' + #set title '""" + title + """' + set ylabel '""" + ylabel + """' + plot""" + ",".join(" '-' using 1:2 with linespoints pointinterval 500 title '%s'" % h for h in hostnames) + "\n") + for i in xrange(1, n): + for plotline in plotlines: + if plotline[i] is not None: + f.write("%s %s\n" % (plotline[0], plotline[i].rstrip("%"))) + f.write("e\n") def plot_hosts(hostnames, fields): - for field in fields: - for logscale in (False, True): - gnuplot = subprocess.Popen(("gnuplot",), stdin = subprocess.PIPE) - gnuplot.stdin.write("set terminal pdf\n") - gnuplot.stdin.write("set output '%s/%s-%s.pdf'\n" % (outdir, field, "log" if logscale else "linear")) - plotter(gnuplot.stdin, hostnames, field, logscale = logscale) - gnuplot.stdin.close() - gnuplot.wait() + for field in fields: + for logscale in (False, True): + gnuplot = subprocess.Popen(("gnuplot",), stdin = subprocess.PIPE) + gnuplot.stdin.write("set terminal pdf\n") + gnuplot.stdin.write("set output '%s/%s-%s.pdf'\n" % (outdir, field, "log" if logscale else "linear")) + plotter(gnuplot.stdin, hostnames, field, logscale = logscale) + gnuplot.stdin.close() + gnuplot.wait() outdir = "images" if not os.path.exists(outdir): - os.makedirs(outdir) + os.makedirs(outdir) mb = mailbox.Maildir("/u/sra/rpki/rcynic-xml", factory = None, create = False) if sys.platform == "darwin": # Sigh - shelf = shelve.open("rcynic-xml", "c") + shelf = shelve.open("rcynic-xml", "c") else: - shelf = shelve.open("rcynic-xml.db", "c") + shelf = shelve.open("rcynic-xml.db", "c") sessions = [] @@ -237,55 +237,55 @@ latest = None parsed = 0 for i, key in enumerate(mb.iterkeys(), 1): - sys.stderr.write("\r%s %d/%d/%d..." % ("|\\-/"[i & 3], parsed, i, len(mb))) - - if key in shelf: - session = shelf[key] - - else: - sys.stderr.write("%s..." % key) - assert not mb[key].is_multipart() - input = ElementTreeFromString(mb[key].get_payload()) - date = input.get("date") - sys.stderr.write("%s..." % date) - session = Session(date, key) - for elt in input.findall("rsync_history"): - session.add_rsync_history(Rsync_History(elt)) - for elt in input.findall("validation_status"): - if elt.get("generation") == "current": - session.add_uri(elt.text.strip()) - session.finalize() - shelf[key] = session - parsed += 1 - - sessions.append(session) - if latest is None or session.session_id > latest.session_id: - latest = session + sys.stderr.write("\r%s %d/%d/%d..." % ("|\\-/"[i & 3], parsed, i, len(mb))) + + if key in shelf: + session = shelf[key] + + else: + sys.stderr.write("%s..." % key) + assert not mb[key].is_multipart() + input = ElementTreeFromString(mb[key].get_payload()) + date = input.get("date") + sys.stderr.write("%s..." 
% date) + session = Session(date, key) + for elt in input.findall("rsync_history"): + session.add_rsync_history(Rsync_History(elt)) + for elt in input.findall("validation_status"): + if elt.get("generation") == "current": + session.add_uri(elt.text.strip()) + session.finalize() + shelf[key] = session + parsed += 1 + + sessions.append(session) + if latest is None or session.session_id > latest.session_id: + latest = session sys.stderr.write("\n") shelf.sync() for session in sessions: - if not getattr(session, "calculated_failure_history", False): - session.calculate_failure_history(sessions) - shelf[session.msg_key] = session + if not getattr(session, "calculated_failure_history", False): + session.calculate_failure_history(sessions) + shelf[session.msg_key] = session if plot_all_hosts: - hostnames = sorted(reduce(lambda x, y: x | y, - (s.hostnames for s in sessions), - set())) + hostnames = sorted(reduce(lambda x, y: x | y, + (s.hostnames for s in sessions), + set())) else: - hostnames = ("rpki.apnic.net", "rpki.ripe.net", "repository.lacnic.net", "rpki.afrinic.net", "rpki.arin.net", - #"localcert.ripe.net", "arin.rpki.net", "repo0.rpki.net", "rgnet.rpki.net", - "ca0.rpki.net") + hostnames = ("rpki.apnic.net", "rpki.ripe.net", "repository.lacnic.net", "rpki.afrinic.net", "rpki.arin.net", + #"localcert.ripe.net", "arin.rpki.net", "repo0.rpki.net", "rgnet.rpki.net", + "ca0.rpki.net") plot_hosts(hostnames, [fmt.attr for fmt in Host.format if fmt.attr != "hostname"]) if latest is not None: - f = open("rcynic.xml", "wb") - f.write(mb[latest.msg_key].get_payload()) - f.close() + f = open("rcynic.xml", "wb") + f.write(mb[latest.msg_key].get_payload()) + f.close() shelf.close() diff --git a/potpourri/analyze-transition.py b/potpourri/analyze-transition.py index e2125dfb..9f7928dc 100644 --- a/potpourri/analyze-transition.py +++ b/potpourri/analyze-transition.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") -# +# # Permission to use, copy, modify, and/or distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -21,44 +21,44 @@ Compare rcynic.xml files, tell the user what became invalid, and why. 
import sys try: - from lxml.etree import ElementTree + from lxml.etree import ElementTree except ImportError: - from xml.etree.ElementTree import ElementTree + from xml.etree.ElementTree import ElementTree class Object(object): - def __init__(self, session, uri): - self.session = session - self.uri = uri - self.labels = [] + def __init__(self, session, uri): + self.session = session + self.uri = uri + self.labels = [] - def add(self, label): - self.labels.append(label) + def add(self, label): + self.labels.append(label) - def __cmp__(self, other): - return cmp(self.labels, other.labels) + def __cmp__(self, other): + return cmp(self.labels, other.labels) - @property - def accepted(self): - return "object_accepted" in self.labels + @property + def accepted(self): + return "object_accepted" in self.labels class Session(dict): - def __init__(self, name): - self.name = name - tree = ElementTree(file = name) - labels = tuple((elt.tag.strip(), elt.text.strip()) for elt in tree.find("labels")) - self.labels = tuple(pair[0] for pair in labels) - self.descrs = dict(labels) - self.date = tree.getroot().get("date") - for elt in tree.findall("validation_status"): - status = elt.get("status") - uri = elt.text.strip() - if status.startswith("rsync_transfer_") or elt.get("generation") != "current": - continue - if uri not in self: - self[uri] = Object(self, uri) - self[uri].add(status) + def __init__(self, name): + self.name = name + tree = ElementTree(file = name) + labels = tuple((elt.tag.strip(), elt.text.strip()) for elt in tree.find("labels")) + self.labels = tuple(pair[0] for pair in labels) + self.descrs = dict(labels) + self.date = tree.getroot().get("date") + for elt in tree.findall("validation_status"): + status = elt.get("status") + uri = elt.text.strip() + if status.startswith("rsync_transfer_") or elt.get("generation") != "current": + continue + if uri not in self: + self[uri] = Object(self, uri) + self[uri].add(status) skip_labels = frozenset(("object_accepted", "object_rejected")) @@ -66,23 +66,23 @@ old_db = new_db = None for arg in sys.argv[1:]: - old_db = new_db - new_db = Session(arg) - if old_db is None: - continue - - old_uris = frozenset(old_db) - new_uris = frozenset(new_db) - - for uri in sorted(old_uris - new_uris): - print new_db.date, uri, "dropped" - - for uri in sorted(old_uris & new_uris): - old = old_db[uri] - new = new_db[uri] - if old.accepted and not new.accepted: - print new_db.date, uri, "invalid" - labels = frozenset(new.labels) - frozenset(old.labels) - skip_labels - for label in new.labels: - if label in labels: - print " ", new_db.descrs[label] + old_db = new_db + new_db = Session(arg) + if old_db is None: + continue + + old_uris = frozenset(old_db) + new_uris = frozenset(new_db) + + for uri in sorted(old_uris - new_uris): + print new_db.date, uri, "dropped" + + for uri in sorted(old_uris & new_uris): + old = old_db[uri] + new = new_db[uri] + if old.accepted and not new.accepted: + print new_db.date, uri, "invalid" + labels = frozenset(new.labels) - frozenset(old.labels) - skip_labels + for label in new.labels: + if label in labels: + print " ", new_db.descrs[label] diff --git a/potpourri/apnic-to-csv.py b/potpourri/apnic-to-csv.py index 62293a51..83f5388b 100644 --- a/potpourri/apnic-to-csv.py +++ b/potpourri/apnic-to-csv.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above 
# copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -27,29 +27,29 @@ prefixes = csv_writer("prefixes.csv") for line in open("delegated-apnic-extended-latest"): - line = line.rstrip() + line = line.rstrip() - if not line.startswith("apnic|") or line.endswith("|summary"): - continue + if not line.startswith("apnic|") or line.endswith("|summary"): + continue - try: - registry, cc, rectype, start, value, date, status, opaque_id = line.split("|") - except ValueError: - continue + try: + registry, cc, rectype, start, value, date, status, opaque_id = line.split("|") + except ValueError: + continue - if not opaque_id: - continue + if not opaque_id: + continue - assert registry == "apnic" + assert registry == "apnic" - if rectype == "asn": - asns.writerow((opaque_id, "%s-%s" % (start, int(start) + int(value) - 1))) + if rectype == "asn": + asns.writerow((opaque_id, "%s-%s" % (start, int(start) + int(value) - 1))) - elif rectype == "ipv4": - prefixes.writerow((opaque_id, "%s-%s" % (start, v4addr(v4addr(start) + long(value) - 1)))) + elif rectype == "ipv4": + prefixes.writerow((opaque_id, "%s-%s" % (start, v4addr(v4addr(start) + long(value) - 1)))) - elif rectype == "ipv6": - prefixes.writerow((opaque_id, "%s/%s" % (start, value))) + elif rectype == "ipv6": + prefixes.writerow((opaque_id, "%s/%s" % (start, value))) asns.close() prefixes.close() diff --git a/potpourri/arin-to-csv.py b/potpourri/arin-to-csv.py index a4e7ffc3..a4b7f285 100644 --- a/potpourri/arin-to-csv.py +++ b/potpourri/arin-to-csv.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -34,9 +34,9 @@ import lxml.etree from rpki.csv_utils import csv_writer def ns(tag): - return "{http://www.arin.net/bulkwhois/core/v1}" + tag + return "{http://www.arin.net/bulkwhois/core/v1}" + tag -tag_asn = ns("asn") +tag_asn = ns("asn") tag_net = ns("net") tag_org = ns("org") tag_poc = ns("poc") @@ -49,12 +49,12 @@ tag_startAsNumber = ns("startAsNumber") tag_endAsNumber = ns("endAsNumber") def find(node, tag): - return node.findtext(tag).strip() + return node.findtext(tag).strip() def do_asn(node): - asns.writerow((find(node, tag_orgHandle), - "%s-%s" % (find(node, tag_startAsNumber), - find(node, tag_endAsNumber)))) + asns.writerow((find(node, tag_orgHandle), + "%s-%s" % (find(node, tag_startAsNumber), + find(node, tag_endAsNumber)))) erx_table = { "AF" : "afrinic", @@ -71,19 +71,19 @@ erx_table = { "RX" : "ripe" } def do_net(node): - handle = find(node, tag_orgHandle) - for netblock in node.iter(tag_netBlock): - tag = find(netblock, tag_type) - startAddress = find(netblock, tag_startAddress) - endAddress = find(netblock, tag_endAddress) - if not startAddress.endswith(".000") and not startAddress.endswith(":0000"): - continue - if not endAddress.endswith(".255") and not endAddress.endswith(":FFFF"): - continue - if tag in ("DS", "DA", "IU"): - prefixes.writerow((handle, "%s-%s" % (startAddress, endAddress))) - elif tag in erx_table: - erx.writerow((erx_table[tag], "%s-%s" % (startAddress, endAddress))) + handle = find(node, tag_orgHandle) + for netblock in node.iter(tag_netBlock): + tag = find(netblock, tag_type) + startAddress = find(netblock, tag_startAddress) + endAddress = find(netblock, tag_endAddress) + if not startAddress.endswith(".000") and not startAddress.endswith(":0000"): + continue + if not endAddress.endswith(".255") and not endAddress.endswith(":FFFF"): + continue + if tag in ("DS", "DA", "IU"): + prefixes.writerow((handle, "%s-%s" % (startAddress, endAddress))) + elif tag in erx_table: + erx.writerow((erx_table[tag], "%s-%s" % (startAddress, endAddress))) dispatch = { tag_asn : do_asn, tag_net : do_net } @@ -95,19 +95,19 @@ root = None for event, node in lxml.etree.iterparse(sys.stdin): - if root is None: - root = node - while root.getparent() is not None: - root = root.getparent() + if root is None: + root = node + while root.getparent() is not None: + root = root.getparent() - if node.getparent() is root: + if node.getparent() is root: - if node.tag in dispatch: - dispatch[node.tag](node) + if node.tag in dispatch: + dispatch[node.tag](node) - node.clear() - while node.getprevious() is not None: - del node.getparent()[0] + node.clear() + while node.getprevious() is not None: + del node.getparent()[0] asns.close() prefixes.close() diff --git a/potpourri/cross_certify.py b/potpourri/cross_certify.py index fab7743b..4e6485b7 100644 --- a/potpourri/cross_certify.py +++ b/potpourri/cross_certify.py @@ -1,13 +1,13 @@ # $Id$ -# +# # Copyright (C) 2014 Dragon Research Labs ("DRL") # Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") # Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notices and this permission notice appear in all copies. 
-# +# # THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL # WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, @@ -57,18 +57,18 @@ now = rpki.sundial.now() notAfter = now + args.lifetime try: - with open(args.serial, "r") as f: - serial = int(f.read().splitlines()[0], 16) + with open(args.serial, "r") as f: + serial = int(f.read().splitlines()[0], 16) except IOError: - serial = 1 + serial = 1 cert = args.ca.cross_certify(args.key, args.input, serial, notAfter, now) with open(args.serial, "w") as f: - f.write("%02x\n" % (serial + 1)) + f.write("%02x\n" % (serial + 1)) if args.out is None: - sys.stdout.write(cert.get_PEM()) + sys.stdout.write(cert.get_PEM()) else: - with open(args.out, "w") as f: - f.write(cert.get_PEM()) + with open(args.out, "w") as f: + f.write(cert.get_PEM()) diff --git a/potpourri/csvgrep.py b/potpourri/csvgrep.py index 68bdd259..3d558245 100644 --- a/potpourri/csvgrep.py +++ b/potpourri/csvgrep.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -37,36 +37,36 @@ ipv4 = resource_set_ipv4() ipv6 = resource_set_ipv6() for datum in sys.argv[1:]: - if datum.replace("-", "").isdigit(): - t = asn - else: - t = ipv6 if ":" in datum else ipv4 - if "-" not in datum and "/" not in datum: - datum = datum + "-" + datum - try: - t.append(t.parse_str(datum)) - except: - print "Error attempting to parse", datum - raise + if datum.replace("-", "").isdigit(): + t = asn + else: + t = ipv6 if ":" in datum else ipv4 + if "-" not in datum and "/" not in datum: + datum = datum + "-" + datum + try: + t.append(t.parse_str(datum)) + except: + print "Error attempting to parse", datum + raise #print "Looking for: ASNs %s IPv4 %s IPv6 %s" % (asn, ipv4, ipv6) def matches(set1, datum): - set2 = set1.__class__(datum) - if set1.intersection(set2): - return set2 - else: - return False + set2 = set1.__class__(datum) + if set1.intersection(set2): + return set2 + else: + return False if asn: - for h, a in csv_reader("asns.csv", columns = 2): - m = matches(asn, a) - if m: - print h, m + for h, a in csv_reader("asns.csv", columns = 2): + m = matches(asn, a) + if m: + print h, m if ipv4 or ipv6: - for h, a in csv_reader("prefixes.csv", columns = 2): - t = ipv6 if ":" in a else ipv4 - m = t and matches(t, a) - if m: - print h, m + for h, a in csv_reader("prefixes.csv", columns = 2): + t = ipv6 if ":" in a else ipv4 + m = t and matches(t, a) + if m: + print h, m diff --git a/potpourri/django-legacy-database.README b/potpourri/django-legacy-database.README new file mode 100644 index 00000000..41a3b911 --- /dev/null +++ b/potpourri/django-legacy-database.README @@ -0,0 +1,4 @@ +Snapshot of work in progress on converting our existing databases into +Django using South 1.0 migrations. This will probably need rewriting +to address changes in how we deal with Django settings and multiple +databases, this snapshot is just to get it into the subversion archive. 
diff --git a/potpourri/django-legacy-database.tar.xz b/potpourri/django-legacy-database.tar.xz Binary files differnew file mode 100644 index 00000000..762dde7d --- /dev/null +++ b/potpourri/django-legacy-database.tar.xz diff --git a/potpourri/expand-roa-prefixes.py b/potpourri/expand-roa-prefixes.py index ae34ea0a..c08f8abf 100644 --- a/potpourri/expand-roa-prefixes.py +++ b/potpourri/expand-roa-prefixes.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2011 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -27,53 +27,53 @@ import rpki.resource_set import rpki.ipaddrs class NotAPrefix(Exception): - """ - Address is not a proper prefix. - """ + """ + Address is not a proper prefix. + """ class address_range(object): - """ - Iterator for rpki.ipaddrs address objects. - """ + """ + Iterator for rpki.ipaddrs address objects. + """ - def __init__(self, start, stop, step): - self.addr = start - self.stop = stop - self.step = step - self.type = type(start) + def __init__(self, start, stop, step): + self.addr = start + self.stop = stop + self.step = step + self.type = type(start) - def __iter__(self): - while self.addr < self.stop: - yield self.addr - self.addr = self.type(self.addr + self.step) + def __iter__(self): + while self.addr < self.stop: + yield self.addr + self.addr = self.type(self.addr + self.step) def main(argv): - prefix_sets = [] - for arg in argv: - if ":" in arg: - prefix_sets.extend(rpki.resource_set.roa_prefix_set_ipv6(arg)) - else: - prefix_sets.extend(rpki.resource_set.roa_prefix_set_ipv4(arg)) + prefix_sets = [] + for arg in argv: + if ":" in arg: + prefix_sets.extend(rpki.resource_set.roa_prefix_set_ipv6(arg)) + else: + prefix_sets.extend(rpki.resource_set.roa_prefix_set_ipv4(arg)) - for prefix_set in prefix_sets: - sys.stdout.write("%s expands to:\n" % prefix_set) + for prefix_set in prefix_sets: + sys.stdout.write("%s expands to:\n" % prefix_set) - prefix_type = prefix_set.range_type.datum_type - prefix_min = prefix_set.prefix - prefix_max = prefix_set.prefix + (1L << (prefix_type.bits - prefix_set.prefixlen)) + prefix_type = prefix_set.range_type.datum_type + prefix_min = prefix_set.prefix + prefix_max = prefix_set.prefix + (1L << (prefix_type.bits - prefix_set.prefixlen)) - for prefixlen in xrange(prefix_set.prefixlen, prefix_set.max_prefixlen + 1): + for prefixlen in xrange(prefix_set.prefixlen, prefix_set.max_prefixlen + 1): - step = (1L << (prefix_type.bits - prefixlen)) - mask = step - 1 + step = (1L << (prefix_type.bits - prefixlen)) + mask = step - 1 - for addr in address_range(prefix_min, prefix_max, step): - if (addr & mask) != 0: - raise NotAPrefix, "%s is not a /%d prefix" % (addr, prefixlen) - sys.stdout.write(" %s/%d\n" % (addr, prefixlen)) + for addr in address_range(prefix_min, prefix_max, step): + if (addr & mask) != 0: + raise NotAPrefix, "%s is not a /%d prefix" % (addr, prefixlen) + sys.stdout.write(" %s/%d\n" % (addr, prefixlen)) - sys.stdout.write("\n") + sys.stdout.write("\n") if __name__ == "__main__": - main(sys.argv[1:] if len(sys.argv) > 1 else ["18.0.0.0/8-24"]) + main(sys.argv[1:] if len(sys.argv) > 1 else 
["18.0.0.0/8-24"]) diff --git a/potpourri/extract-key.py b/potpourri/extract-key.py index b85c3d55..e185b255 100644 --- a/potpourri/extract-key.py +++ b/potpourri/extract-key.py @@ -2,11 +2,11 @@ # Copyright (C) 2014 Dragon Research Labs ("DRL") # Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notices and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND AND ARIN DISCLAIM ALL # WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR @@ -49,16 +49,16 @@ args = parser.parse_args() cur = MySQLdb.connect(user = args.user, db = args.db, passwd = args.password).cursor() cur.execute( - """ - SELECT bsc.private_key_id, bsc.signing_cert - FROM bsc, self - WHERE self.self_handle = %s AND self.self_id = bsc.self_id AND bsc_handle = %s - """, - (args.self, args.bsc)) + """ + SELECT bsc.private_key_id, bsc.signing_cert + FROM bsc, self + WHERE self.self_handle = %s AND self.self_id = bsc.self_id AND bsc_handle = %s + """, + (args.self, args.bsc)) key, cer = cur.fetchone() print rpki.x509.RSA(DER = key).get_PEM() if cer: - print rpki.x509.X509(DER = cer).get_PEM() + print rpki.x509.X509(DER = cer).get_PEM() diff --git a/potpourri/fakerootd.py b/potpourri/fakerootd.py index 6275a2a9..22b1c117 100644 --- a/potpourri/fakerootd.py +++ b/potpourri/fakerootd.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2011 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -44,7 +44,6 @@ s6.listen(limit) print "Going to sleep at", datetime.datetime.utcnow() try: - signal.pause() + signal.pause() except KeyboardInterrupt: - sys.exit(0) - + sys.exit(0) diff --git a/potpourri/format-application-x-rpki.py b/potpourri/format-application-x-rpki.py index 184103f9..44428131 100644 --- a/potpourri/format-application-x-rpki.py +++ b/potpourri/format-application-x-rpki.py @@ -1,12 +1,12 @@ # $Id$ -# +# # Copyright (C) 2014 Dragon Research Labs ("DRL") # Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notices and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL # WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL OR @@ -53,80 +53,80 @@ parser.add_argument("-u", "--unseen", action = "store_true", args = parser.parse_args() def pprint_cert(b64): - return rpki.POW.X509.derRead(base64.b64decode(b64)).pprint() - + return rpki.POW.X509.derRead(base64.b64decode(b64)).pprint() + def up_down(): - msg["X-RPKI-Up-Down-Type"] = xml.get("type") - msg["X-RPKI-Up-Down-Sender"] = xml.get("sender") - msg["X-RPKI-Up-Down-Recipient"] = xml.get("recipient") - msg["Subject"] = "Up-down %s %s => %s" % (xml.get("type"), xml.get("sender"), xml.get("recipient")) - for x in xml: - if x.tag.endswith("class"): - for y in x: - if y.tag.endswith("certificate") or y.tag.endswith("issuer"): - msg.attach(email.mime.text.MIMEText(pprint_cert(y.text))) + msg["X-RPKI-Up-Down-Type"] = xml.get("type") + msg["X-RPKI-Up-Down-Sender"] = xml.get("sender") + msg["X-RPKI-Up-Down-Recipient"] = xml.get("recipient") + msg["Subject"] = "Up-down %s %s => %s" % (xml.get("type"), xml.get("sender"), xml.get("recipient")) + for x in xml: + if x.tag.endswith("class"): + for y in x: + if y.tag.endswith("certificate") or y.tag.endswith("issuer"): + msg.attach(email.mime.text.MIMEText(pprint_cert(y.text))) def left_right(): - msg["X-RPKI-Left-Right-Type"] = xml.get("type") - msg["Subject"] = "Left-right %s" % xml.get("type") + msg["X-RPKI-Left-Right-Type"] = xml.get("type") + msg["Subject"] = "Left-right %s" % xml.get("type") def publication(): - msg["X-RPKI-Left-Right-Type"] = xml.get("type") - msg["Subject"] = "Publication %s" % xml.get("type") + msg["X-RPKI-Left-Right-Type"] = xml.get("type") + msg["Subject"] = "Publication %s" % xml.get("type") dispatch = { "{http://www.apnic.net/specs/rescerts/up-down/}message" : up_down, "{http://www.hactrn.net/uris/rpki/left-right-spec/}msg" : left_right, "{http://www.hactrn.net/uris/rpki/publication-spec/}msg" : publication } def fix_headers(): - if "X-RPKI-PID" in srcmsg or "X-RPKI-Object" in srcmsg: - msg["X-RPKI-PID"] = srcmsg["X-RPKI-PID"] - msg["X-RPKI-Object"] = srcmsg["X-RPKI-Object"] - else: - words = srcmsg["Subject"].split() - msg["X-RPKI-PID"] = words[1] - msg["X-RPKI-Object"] = " ".join(words[4:]) - + if "X-RPKI-PID" in srcmsg or "X-RPKI-Object" in srcmsg: + msg["X-RPKI-PID"] = srcmsg["X-RPKI-PID"] + msg["X-RPKI-Object"] = srcmsg["X-RPKI-Object"] + else: + words = srcmsg["Subject"].split() + msg["X-RPKI-PID"] = words[1] + msg["X-RPKI-Object"] = " ".join(words[4:]) + destination = None source = None try: - destination = mailbox.MH(args.output, factory = None, create = True) - source = mailbox.Maildir(args.input, factory = None) + destination = mailbox.MH(args.output, factory = None, create = True) + source = mailbox.Maildir(args.input, factory = None) - for srckey, srcmsg in source.iteritems(): - if args.unseen and "S" in srcmsg.get_flags(): - continue - assert not srcmsg.is_multipart() and srcmsg.get_content_type() == "application/x-rpki" - payload = srcmsg.get_payload(decode = True) - cms = rpki.POW.CMS.derRead(payload) - txt = cms.verify(rpki.POW.X509Store(), None, rpki.POW.CMS_NOCRL | rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | rpki.POW.CMS_NO_ATTR_VERIFY | rpki.POW.CMS_NO_CONTENT_VERIFY) - xml = lxml.etree.fromstring(txt) - tag = xml.tag - if args.tag and tag != args.tag: - continue - msg = email.mime.multipart.MIMEMultipart("related") - msg["X-RPKI-Tag"] = tag - for i in ("Date", "Message-ID", "X-RPKI-Timestamp"): - msg[i] = srcmsg[i] - fix_headers() - if tag in dispatch: - dispatch[tag]() - if "Subject" not in msg: - msg["Subject"] = srcmsg["Subject"] - 
msg.attach(email.mime.text.MIMEText(txt)) - msg.attach(email.mime.application.MIMEApplication(payload, "x-rpki")) - msg.epilogue = "\n" # Force trailing newline - key = destination.add(msg) - print "Added", key - if args.kill: - del source[srckey] - elif args.mark: - srcmsg.set_subdir("cur") - srcmsg.add_flag("S") - source[srckey] = srcmsg + for srckey, srcmsg in source.iteritems(): + if args.unseen and "S" in srcmsg.get_flags(): + continue + assert not srcmsg.is_multipart() and srcmsg.get_content_type() == "application/x-rpki" + payload = srcmsg.get_payload(decode = True) + cms = rpki.POW.CMS.derRead(payload) + txt = cms.verify(rpki.POW.X509Store(), None, rpki.POW.CMS_NOCRL | rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | rpki.POW.CMS_NO_ATTR_VERIFY | rpki.POW.CMS_NO_CONTENT_VERIFY) + xml = lxml.etree.fromstring(txt) + tag = xml.tag + if args.tag and tag != args.tag: + continue + msg = email.mime.multipart.MIMEMultipart("related") + msg["X-RPKI-Tag"] = tag + for i in ("Date", "Message-ID", "X-RPKI-Timestamp"): + msg[i] = srcmsg[i] + fix_headers() + if tag in dispatch: + dispatch[tag]() + if "Subject" not in msg: + msg["Subject"] = srcmsg["Subject"] + msg.attach(email.mime.text.MIMEText(txt)) + msg.attach(email.mime.application.MIMEApplication(payload, "x-rpki")) + msg.epilogue = "\n" # Force trailing newline + key = destination.add(msg) + print "Added", key + if args.kill: + del source[srckey] + elif args.mark: + srcmsg.set_subdir("cur") + srcmsg.add_flag("S") + source[srckey] = srcmsg finally: - if destination: - destination.close() - if source: - source.close() + if destination: + destination.close() + if source: + source.close() diff --git a/potpourri/gc_summary.py b/potpourri/gc_summary.py index 1f6987bf..61b21587 100644 --- a/potpourri/gc_summary.py +++ b/potpourri/gc_summary.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2010 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -20,93 +20,93 @@ import sys, os, time class datapoint(object): - outtype = os.getenv("OUTTYPE", "png") - outname = os.getenv("OUTNAME", "") - timefmt = os.getenv("TIMEFMT", "%T") - pretend = os.getenv("PRETEND_EVERYTHING_CHANGED", False) - threshold = int(os.getenv("THRESHOLD", "100")) + outtype = os.getenv("OUTTYPE", "png") + outname = os.getenv("OUTNAME", "") + timefmt = os.getenv("TIMEFMT", "%T") + pretend = os.getenv("PRETEND_EVERYTHING_CHANGED", False) + threshold = int(os.getenv("THRESHOLD", "100")) - raw = [] - filenames = [] + raw = [] + filenames = [] - def __init__(self, filename, timestamp, process, count, typesig, line): - self.filename = filename - self.timestamp = timestamp - self.process = process - self.count = count - self.typesig = typesig - self.line = line - self.key = "%s %s" % (filename, typesig) - self.raw.append(self) - if filename not in self.filenames: - self.filenames.append(filename) + def __init__(self, filename, timestamp, process, count, typesig, line): + self.filename = filename + self.timestamp = timestamp + self.process = process + self.count = count + self.typesig = typesig + self.line = line + self.key = "%s %s" % (filename, typesig) + self.raw.append(self) + if filename not in self.filenames: + self.filenames.append(filename) - def __cmp__(self, other): - c = cmp(self.key, other.key) - return c if c else cmp(self.timestamp, other.timestamp) + def __cmp__(self, other): + c = cmp(self.key, other.key) + return c if c else cmp(self.timestamp, other.timestamp) - @classmethod - def plot(cls): + @classmethod + def plot(cls): - print "# [%s] Looking for interesting records" % time.strftime("%T") - changed = {} - for i in cls.raw: - if i.key not in changed: - changed[i.key] = set() - changed[i.key].add(i.count) - if cls.pretend: - changed = set(changed.iterkeys()) - else: - changed = set(k for k, v in changed.iteritems() if max(v) - min(v) > cls.threshold) + print "# [%s] Looking for interesting records" % time.strftime("%T") + changed = {} + for i in cls.raw: + if i.key not in changed: + changed[i.key] = set() + changed[i.key].add(i.count) + if cls.pretend: + changed = set(changed.iterkeys()) + else: + changed = set(k for k, v in changed.iteritems() if max(v) - min(v) > cls.threshold) - if not changed: - print "# [%s] Apparently nothing worth reporting" % time.strftime("%T") - print "print 'Nothing to plot'" - return + if not changed: + print "# [%s] Apparently nothing worth reporting" % time.strftime("%T") + print "print 'Nothing to plot'" + return - print "# [%s] Header" % time.strftime("%T") - print "set xdata time" - print "set timefmt '%Y-%m-%dT%H:%M:%S'" - print "set format x '%s'" % cls.timefmt - print "set key right bottom" - if cls.outname: - print "set terminal", cls.outtype - print "set output '%s.%s'" % (cls.outname, cls.outtype) - print "set term png size 1024,1024" - print "plot", ", ".join("'-' using 1:2 with linespoints title '%s'" % i for i in changed) + print "# [%s] Header" % time.strftime("%T") + print "set xdata time" + print "set timefmt '%Y-%m-%dT%H:%M:%S'" + print "set format x '%s'" % cls.timefmt + print "set key right bottom" + if cls.outname: + print "set terminal", cls.outtype + print "set output '%s.%s'" % (cls.outname, cls.outtype) + print "set term png size 1024,1024" + print "plot", ", ".join("'-' using 1:2 with linespoints title '%s'" % i for i in changed) - print "# [%s] Sorting" % time.strftime("%T") - cls.raw.sort() + print "# [%s] Sorting" % time.strftime("%T") + 
cls.raw.sort() - key = None - proc = None - for i in cls.raw: - if i.key not in changed: - continue - if key is not None and i.key != key: + key = None + proc = None + for i in cls.raw: + if i.key not in changed: + continue + if key is not None and i.key != key: + print "e" + elif proc is not None and i.process != proc: + print "" + key = i.key + proc = i.process + print "#", i.key, i.line + print i.timestamp, i.count print "e" - elif proc is not None and i.process != proc: - print "" - key = i.key - proc = i.process - print "#", i.key, i.line - print i.timestamp, i.count - print "e" - if not cls.outname: - print "pause mouse any" + if not cls.outname: + print "pause mouse any" for filename in sys.argv[1:]: - print "# [%s] Reading %s" % (time.strftime("%T"), filename) - for line in open(filename): - if "gc_summary:" in line: - word = line.split(None, 6) - if word[4].isdigit() and word[5].startswith("(") and word[5].endswith(")"): - datapoint(filename = filename, - timestamp = word[0] + "T" + word[1], - process = word[2], - count = int(word[4]), - typesig = word[5], - line = line.strip()) - + print "# [%s] Reading %s" % (time.strftime("%T"), filename) + for line in open(filename): + if "gc_summary:" in line: + word = line.split(None, 6) + if word[4].isdigit() and word[5].startswith("(") and word[5].endswith(")"): + datapoint(filename = filename, + timestamp = word[0] + "T" + word[1], + process = word[2], + count = int(word[4]), + typesig = word[5], + line = line.strip()) + print "# [%s] Plotting" % time.strftime("%T") datapoint.plot() diff --git a/potpourri/generate-ripe-root-cert.py b/potpourri/generate-ripe-root-cert.py index 3407bc51..1b891dce 100644 --- a/potpourri/generate-ripe-root-cert.py +++ b/potpourri/generate-ripe-root-cert.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -24,10 +24,10 @@ import lxml.etree from rpki.csv_utils import csv_writer def iterate_xml(filename, tag): - return lxml.etree.parse(filename).getroot().getiterator(tag) + return lxml.etree.parse(filename).getroot().getiterator(tag) def ns(tag): - return "{http://www.iana.org/assignments}" + tag + return "{http://www.iana.org/assignments}" + tag tag_description = ns("description") tag_designation = ns("designation") @@ -39,19 +39,19 @@ asns = csv_writer("asns.csv") prefixes = csv_writer("prefixes.csv") for record in iterate_xml("as-numbers.xml", tag_record): - if record.findtext(tag_description) == "Assigned by RIPE NCC": - asns.writerow(("RIPE", record.findtext(tag_number))) - + if record.findtext(tag_description) == "Assigned by RIPE NCC": + asns.writerow(("RIPE", record.findtext(tag_number))) + for record in iterate_xml("ipv4-address-space.xml", tag_record): - if record.findtext(tag_designation) in ("RIPE NCC", "Administered by RIPE NCC"): - prefix = record.findtext(tag_prefix) - p, l = prefix.split("/") - assert l == "8", "Violated /8 assumption: %r" % prefix - prefixes.writerow(("RIPE", "%d.0.0.0/8" % int(p))) - + if record.findtext(tag_designation) in ("RIPE NCC", "Administered by RIPE NCC"): + prefix = record.findtext(tag_prefix) + p, l = prefix.split("/") + assert l == "8", "Violated /8 assumption: %r" % prefix + prefixes.writerow(("RIPE", "%d.0.0.0/8" % int(p))) + for record in iterate_xml("ipv6-unicast-address-assignments.xml", tag_record): - if record.findtext(tag_description) == "RIPE NCC": - prefixes.writerow(("RIPE", record.findtext(tag_prefix))) + if record.findtext(tag_description) == "RIPE NCC": + prefixes.writerow(("RIPE", record.findtext(tag_prefix))) asns.close() prefixes.close() diff --git a/potpourri/generate-root-certificate b/potpourri/generate-root-certificate deleted file mode 100755 index 31647d5f..00000000 --- a/potpourri/generate-root-certificate +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python - -""" -Generate an RPKI root certificate for rootd. In most cases you should -not need to do this; see caveats in the manual about running rootd if -you think you need this. This script does nothing that can't also be -done with the OpenSSL command line tool, but on some platforms the -installed copy of openssl doesn't understand the RFC 3779 extensions. 
-""" - -import os -import sys -import time -import argparse -import rpki.x509 -import rpki.config -import rpki.sundial -import rpki.resource_set - -os.environ["TZ"] = "UTC" -time.tzset() - -parser = argparse.ArgumentParser(description = __doc__) -parser.add_argument("-c", "--config", help = "configuration file") -parser.add_argument("-a", "--asns", default = "0-4294967295", help = "ASN resources") -parser.add_argument("-4", "--ipv4", default = "0.0.0.0/0", help = "IPv4 resources") -parser.add_argument("-6", "--ipv6", default = "::/0", help = "IPv6 resources") -parser.add_argument("--certificate", default = "root.cer", help = "certificate file") -parser.add_argument("--key", default = "root.key", help = "key file") -parser.add_argument("--tal", default = "root.tal", help = "TAL file") -args = parser.parse_args() - -cfg = rpki.config.parser(args.config, "rootd") - -resources = rpki.resource_set.resource_bag( - asn = rpki.resource_set.resource_set_as(args.asns), - v4 = rpki.resource_set.resource_set_ipv4(args.ipv4), - v6 = rpki.resource_set.resource_set_ipv6(args.ipv6)) - -keypair = rpki.x509.RSA.generate(quiet = True) - -sia = cfg.get("rpki-base-uri") -sia = (sia, sia + "root.mft", None) - -uri = cfg.get("rpki-root-cert-uri") - -cert = rpki.x509.X509.self_certify( - keypair = keypair, - subject_key = keypair.get_public(), - serial = 1, - sia = sia, - notAfter = rpki.sundial.now() + rpki.sundial.timedelta(days = 365), - resources = resources) - -with open(args.certificate, "wb") as f: - f.write(cert.get_DER()) - -with open(args.key, "wb") as f: - f.write(keypair.get_DER()) - -with open(args.tal, "w") as f: - f.write(uri + "\n\n" + keypair.get_public().get_Base64()) diff --git a/potpourri/gski.py b/potpourri/gski.py index 083a59c8..3faf22d6 100644 --- a/potpourri/gski.py +++ b/potpourri/gski.py @@ -17,5 +17,5 @@ import rpki.x509, sys for file in sys.argv[1:]: - cert = rpki.x509.X509(Auto_file = file) - print cert.gSKI(), cert.hSKI(), file + cert = rpki.x509.X509(Auto_file = file) + print cert.gSKI(), cert.hSKI(), file diff --git a/potpourri/guess-roas-from-routeviews.py b/potpourri/guess-roas-from-routeviews.py index d8fb9c4c..8e2ed81a 100644 --- a/potpourri/guess-roas-from-routeviews.py +++ b/potpourri/guess-roas-from-routeviews.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2009 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -39,25 +39,25 @@ from rpki.resource_set import roa_prefix_ipv4, resource_set_ipv4, resource_range roas = [] for filename in sys.argv[1:]: - resources = rpki.x509.X509(Auto_file = filename).get_3779resources().v4 + resources = rpki.x509.X509(Auto_file = filename).get_3779resources().v4 - while resources: - labels = str(resources[0].min).split(".") - labels.reverse() + while resources: + labels = str(resources[0].min).split(".") + labels.reverse() - try: - for answer in dns.resolver.query(".".join(labels) + ".asn.routeviews.org", "txt"): - asn, prefix, prefixlen = answer.strings - roa_prefix = roa_prefix_ipv4(v4addr(prefix), long(prefixlen)) - roa = "%s\t%s\t%s" % (roa_prefix, long(asn), filename) - if roa not in roas: - roas.append(roa) - resources = resources.difference(resource_set_ipv4([roa_prefix.to_resource_range()])) + try: + for answer in dns.resolver.query(".".join(labels) + ".asn.routeviews.org", "txt"): + asn, prefix, prefixlen = answer.strings + roa_prefix = roa_prefix_ipv4(v4addr(prefix), long(prefixlen)) + roa = "%s\t%s\t%s" % (roa_prefix, long(asn), filename) + if roa not in roas: + roas.append(roa) + resources = resources.difference(resource_set_ipv4([roa_prefix.to_resource_range()])) - except dns.resolver.NXDOMAIN: - resources = resources.difference(resource_set_ipv4([resource_range_ipv4(resources[0].min, v4addr(resources[0].min + 256))])) + except dns.resolver.NXDOMAIN: + resources = resources.difference(resource_set_ipv4([resource_range_ipv4(resources[0].min, v4addr(resources[0].min + 256))])) roas.sort() for roa in roas: - print roa + print roa diff --git a/potpourri/iana-to-csv.py b/potpourri/iana-to-csv.py index f803a21e..cf82c7e9 100644 --- a/potpourri/iana-to-csv.py +++ b/potpourri/iana-to-csv.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -24,10 +24,10 @@ from rpki.csv_utils import csv_reader, csv_writer from rpki.resource_set import resource_bag def iterate_xml(filename, tag): - return lxml.etree.parse(filename).getroot().getiterator(tag) + return lxml.etree.parse(filename).getroot().getiterator(tag) def ns(tag): - return "{http://www.iana.org/assignments}" + tag + return "{http://www.iana.org/assignments}" + tag tag_description = ns("description") tag_designation = ns("designation") @@ -40,30 +40,30 @@ handles = {} rirs = { "legacy" : resource_bag() } for rir in ("AfriNIC", "APNIC", "ARIN", "LACNIC", "RIPE NCC"): - handle = rir.split()[0].lower() - handles[rir] = handles["Assigned by %s" % rir] = handles["Administered by %s" % rir] = handle - rirs[handle] = resource_bag() + handle = rir.split()[0].lower() + handles[rir] = handles["Assigned by %s" % rir] = handles["Administered by %s" % rir] = handle + rirs[handle] = resource_bag() asns = csv_writer("asns.csv") prefixes = csv_writer("prefixes.csv") for record in iterate_xml("as-numbers.xml", tag_record): - description = record.findtext(tag_description) - if description in handles: - asns.writerow((handles[description], record.findtext(tag_number))) - + description = record.findtext(tag_description) + if description in handles: + asns.writerow((handles[description], record.findtext(tag_number))) + for record in iterate_xml("ipv4-address-space.xml", tag_record): - designation = record.findtext(tag_designation) - if record.findtext(tag_status) != "RESERVED": - prefix, prefixlen = [int(i) for i in record.findtext(tag_prefix).split("/")] - if prefixlen != 8: - raise ValueError("%s violated /8 assumption" % record.findtext(tag_prefix)) - rirs[handles.get(designation, "legacy")] |= resource_bag.from_str("%d.0.0.0/8" % prefix) + designation = record.findtext(tag_designation) + if record.findtext(tag_status) != "RESERVED": + prefix, prefixlen = [int(i) for i in record.findtext(tag_prefix).split("/")] + if prefixlen != 8: + raise ValueError("%s violated /8 assumption" % record.findtext(tag_prefix)) + rirs[handles.get(designation, "legacy")] |= resource_bag.from_str("%d.0.0.0/8" % prefix) for record in iterate_xml("ipv6-unicast-address-assignments.xml", tag_record): - description = record.findtext(tag_description) - if record.findtext(tag_description) in handles: - rirs[handles[description]] |= resource_bag.from_str(record.findtext(tag_prefix)) + description = record.findtext(tag_description) + if record.findtext(tag_description) in handles: + rirs[handles[description]] |= resource_bag.from_str(record.findtext(tag_prefix)) erx = list(csv_reader("erx.csv")) assert all(r in rirs for r, p in erx) @@ -71,15 +71,15 @@ assert all(r in rirs for r, p in erx) erx_overrides = resource_bag.from_str(",".join(p for r, p in erx), allow_overlap = True) for rir in rirs: - if rir != "legacy": - rirs[rir] -= erx_overrides - rirs[rir] |= resource_bag.from_str(",".join(p for r, p in erx if r == rir), allow_overlap = True) + if rir != "legacy": + rirs[rir] -= erx_overrides + rirs[rir] |= resource_bag.from_str(",".join(p for r, p in erx if r == rir), allow_overlap = True) for rir, bag in rirs.iteritems(): - for p in bag.v4: - prefixes.writerow((rir, p)) - for p in bag.v6: - prefixes.writerow((rir, p)) + for p in bag.v4: + prefixes.writerow((rir, p)) + for p in bag.v6: + prefixes.writerow((rir, p)) asns.close() prefixes.close() diff --git a/potpourri/missing-oids.py b/potpourri/missing-oids.py index 16316eac..8557e841 100644 --- 
a/potpourri/missing-oids.py +++ b/potpourri/missing-oids.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2008 American Registry for Internet Numbers ("ARIN") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -23,16 +23,16 @@ import rpki.POW.pkix, rpki.oids need_header = True for oid, name in rpki.oids.oid2name.items(): - try: - rpki.POW.pkix.oid2obj(oid) - except: - o = rpki.POW.pkix.Oid() - o.set(oid) - if need_header: - print - print "# Local additions" - need_header = False - print - print "OID =", " ".join(("%02X" % ord(c)) for c in o.toString()) - print "Comment = RPKI project" - print "Description =", name, "(" + " ".join((str(i) for i in oid)) + ")" + try: + rpki.POW.pkix.oid2obj(oid) + except: + o = rpki.POW.pkix.Oid() + o.set(oid) + if need_header: + print + print "# Local additions" + need_header = False + print + print "OID =", " ".join(("%02X" % ord(c)) for c in o.toString()) + print "Comment = RPKI project" + print "Description =", name, "(" + " ".join((str(i) for i in oid)) + ")" diff --git a/potpourri/object-dates.py b/potpourri/object-dates.py index b99441d6..ea2fd489 100644 --- a/potpourri/object-dates.py +++ b/potpourri/object-dates.py @@ -5,11 +5,11 @@ # RPKI objects. # Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# +# # Permission to use, copy, modify, and/or distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -28,21 +28,21 @@ extract_flags = (rpki.POW.CMS_NOCRL | rpki.POW.CMS_NO_CONTENT_VERIFY) def get_mft(fn): - cms = rpki.POW.Manifest.derReadFile(fn) - cms.verify(rpki.POW.X509Store(), None, extract_flags) - return cms, cms.certs()[0] + cms = rpki.POW.Manifest.derReadFile(fn) + cms.extractWithoutVerifying() + return cms, cms.certs()[0] def get_roa(fn): - return None, rpki.POW.CMS.derReadFile(fn).certs()[0] + return None, rpki.POW.CMS.derReadFile(fn).certs()[0] def get_gbr(fn): - return None, rpki.POW.CMS.derReadFile(fn).certs()[0] + return None, rpki.POW.CMS.derReadFile(fn).certs()[0] def get_crl(fn): - return rpki.POW.CRL.derReadFile(fn), None + return rpki.POW.CRL.derReadFile(fn), None def get_cer(fn): - return None, rpki.POW.X509.derReadFile(fn) + return None, rpki.POW.X509.derReadFile(fn) dispatch = dict(mft = get_mft, roa = get_roa, @@ -51,13 +51,13 @@ dispatch = dict(mft = get_mft, cer = get_cer) for fn in sys.argv[1:]: - obj, cer = dispatch[os.path.splitext(fn)[1][1:]](fn) - print fn - if cer is not None: - print "notBefore: ", cer.getNotBefore() - if obj is not None: - print "thisUpdate:", obj.getThisUpdate() - print "nextUpdate:", obj.getNextUpdate() - if cer is not None: - print "notAfter: ", cer.getNotAfter() - print + obj, cer = dispatch[os.path.splitext(fn)[1][1:]](fn) + print fn + if cer is not None: + print "notBefore: ", cer.getNotBefore() + if obj is not None: + print "thisUpdate:", obj.getThisUpdate() + print "nextUpdate:", obj.getNextUpdate() + if cer is not None: + print "notAfter: ", cer.getNotAfter() + print diff --git a/potpourri/oob-translate.xsl b/potpourri/oob-translate.xsl new file mode 100644 index 00000000..da71e348 --- /dev/null +++ b/potpourri/oob-translate.xsl @@ -0,0 +1,81 @@ +<!-- $Id$ --> +<!-- + - Translate between old "myrpki" XML and current IETF standards + - track out-of-band-setup protocol XML. Well, partially. Much of + - the old protocol is either irrelevant or can't be translated due + - to embedded signatures, but the subset that other implementations + - support is small enough that we can fake something workable. 
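A side note on the object-dates.py hunk above: it replaces the cms.verify(..., extract_flags) call with the newer cms.extractWithoutVerifying(). If the script has to run against both older and newer rpki.POW builds, a guarded variant along these lines should work; this is only a sketch, reusing the two calls exactly as they appear in the hunk:

    import rpki.POW

    extract_flags = (rpki.POW.CMS_NOCRL |
                     rpki.POW.CMS_NO_CONTENT_VERIFY)

    def get_mft(fn):
        # Prefer the new extraction call; fall back to verify() with the
        # "don't really verify" flags on older rpki.POW builds.
        cms = rpki.POW.Manifest.derReadFile(fn)
        try:
            cms.extractWithoutVerifying()
        except AttributeError:
            cms.verify(rpki.POW.X509Store(), None, extract_flags)
        return cms, cms.certs()[0]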
+ --> + +<xsl:transform xmlns:xsl = "http://www.w3.org/1999/XSL/Transform" + version = "1.0" + xmlns:myrpki = "http://www.hactrn.net/uris/rpki/myrpki/" + xmlns:oob = "http://www.hactrn.net/uris/rpki/rpki-setup/" + exclude-result-prefixes = "myrpki oob"> + + <xsl:output omit-xml-declaration = "yes" + indent = "yes" + method = "xml" + encoding = "US-ASCII"/> + + <!-- Versions of the respective protocols --> + + <xsl:param name = "myrpki-version" select = "2"/> + <xsl:param name = "oob-version" select = "1"/> + + <!-- Old-style identity to new-style child_request --> + + <xsl:template match = "/myrpki:identity"> + <oob:child_request version = "{$oob-version}" + child_handle = "{@handle}"> + <oob:child_bpki_ta> + <xsl:value-of select = "myrpki:bpki_ta"/> + </oob:child_bpki_ta> + </oob:child_request> + </xsl:template> + + <!-- New-style child_request to old style identity --> + + <xsl:template match = "/oob:child_request"> + <myrpki:identity version = "{$myrpki-version}" + handle = "{@child_handle}"> + <myrpki:bpki_ta> + <xsl:value-of select = "oob:child_bpki_ta"/> + </myrpki:bpki_ta> + </myrpki:identity> + </xsl:template> + + <!-- Old-style parent response to new-style parent_response --> + <!-- Referrals are not translatable due to embedded signatures --> + + <xsl:template match = "/myrpki:parent"> + <oob:parent_response version = "{$oob-version}" + service_uri = "{@service_uri}" + child_handle = "{@child_handle}" + parent_handle = "{@parent_handle}"> + <oob:parent_bpki_ta> + <xsl:value-of select = "myrpki:bpki_resource_ta"/> + </oob:parent_bpki_ta> + <xsl:if test = "repository[@type = 'offer']"> + <oob:offer/> + </xsl:if> + </oob:parent_response> + </xsl:template> + + <!-- New-style parent_response to old-style parent response --> + <!-- Referrals are not translatable due to embedded signatures --> + + <xsl:template match = "/oob:parent_response"> + <myrpki:parent version = "{$myrpki-version}" + service_uri = "{@service_uri}" + child_handle = "{@child_handle}" + parent_handle = "{@parent_handle}"> + <myrpki:bpki_resource_ta> + <xsl:value-of select = "oob:parent_bpki_ta"/> + </myrpki:bpki_resource_ta> + <myrpki:bpki_child_ta/> + <myrpki:repository type = "none"/> + </myrpki:parent> + </xsl:template> + +</xsl:transform> diff --git a/potpourri/print-profile.py b/potpourri/print-profile.py index 081d2602..4012fa3f 100644 --- a/potpourri/print-profile.py +++ b/potpourri/print-profile.py @@ -17,4 +17,4 @@ import pstats, glob for f in glob.iglob("*.prof"): - pstats.Stats(f).sort_stats("time").print_stats(50) + pstats.Stats(f).sort_stats("time").print_stats(50) diff --git a/potpourri/rcynic-diff.py b/potpourri/rcynic-diff.py index 327a7b71..d5be51e0 100644 --- a/potpourri/rcynic-diff.py +++ b/potpourri/rcynic-diff.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") -# +# # Permission to use, copy, modify, and/or distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -21,94 +21,94 @@ Diff a series of rcynic.xml files, sort of. 
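The new oob-translate.xsl above is a plain XSLT 1.0 stylesheet, so it can be exercised with lxml, which other scripts in this directory already use. A minimal sketch, assuming an old-style identity file saved as identity.xml (hypothetical name):

    import lxml.etree

    # Load the stylesheet once, then apply it to a parsed myrpki document.
    transform = lxml.etree.XSLT(lxml.etree.parse("oob-translate.xsl"))
    child_request = transform(lxml.etree.parse("identity.xml"))
    print lxml.etree.tostring(child_request, pretty_print = True)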
import sys try: - from lxml.etree import ElementTree + from lxml.etree import ElementTree except ImportError: - from xml.etree.ElementTree import ElementTree + from xml.etree.ElementTree import ElementTree show_backup_generation = False show_rsync_transfer = False class Object(object): - def __init__(self, session, uri, generation): - self.session = session - self.uri = uri - self.generation = generation - self.labels = [] + def __init__(self, session, uri, generation): + self.session = session + self.uri = uri + self.generation = generation + self.labels = [] - def add(self, label): - self.labels.append(label) + def add(self, label): + self.labels.append(label) - def __cmp__(self, other): - return cmp(self.labels, other.labels) + def __cmp__(self, other): + return cmp(self.labels, other.labels) def show(old = None, new = None): - assert old is not None or new is not None - assert old is None or new is None or old.uri == new.uri - if old is None: - obj = new - labels = ["+" + label for label in new.labels] - elif new is None: - obj = old - labels = ["-" + label for label in old.labels] - else: - obj = new - labels = [] - for label in new.session.labels: - if label in new.labels and label in old.labels: - labels.append(label) - elif label in new.labels: - labels.append("+" + label) - elif label in old.labels: - labels.append("-" + label) - labels = " ".join(labels) - if show_backup_generation: - print " ", obj.uri, obj.generation, labels - else: - print " ", obj.uri, labels + assert old is not None or new is not None + assert old is None or new is None or old.uri == new.uri + if old is None: + obj = new + labels = ["+" + label for label in new.labels] + elif new is None: + obj = old + labels = ["-" + label for label in old.labels] + else: + obj = new + labels = [] + for label in new.session.labels: + if label in new.labels and label in old.labels: + labels.append(label) + elif label in new.labels: + labels.append("+" + label) + elif label in old.labels: + labels.append("-" + label) + labels = " ".join(labels) + if show_backup_generation: + print " ", obj.uri, obj.generation, labels + else: + print " ", obj.uri, labels class Session(dict): - def __init__(self, name): - self.name = name - tree = ElementTree(file = name) - self.labels = [elt.tag.strip() for elt in tree.find("labels")] - for elt in tree.findall("validation_status"): - generation = elt.get("generation") - status = elt.get("status") - uri = elt.text.strip() - if not show_rsync_transfer and status.startswith("rsync_transfer_"): - continue - if show_backup_generation: - key = (uri, generation) - elif generation == "backup": - continue - else: - key = uri - if key not in self: - self[key] = Object(self, uri, generation) - self[key].add(status) + def __init__(self, name): + self.name = name + tree = ElementTree(file = name) + self.labels = [elt.tag.strip() for elt in tree.find("labels")] + for elt in tree.findall("validation_status"): + generation = elt.get("generation") + status = elt.get("status") + uri = elt.text.strip() + if not show_rsync_transfer and status.startswith("rsync_transfer_"): + continue + if show_backup_generation: + key = (uri, generation) + elif generation == "backup": + continue + else: + key = uri + if key not in self: + self[key] = Object(self, uri, generation) + self[key].add(status) old_db = new_db = None for arg in sys.argv[1:]: - old_db = new_db - new_db = Session(arg) - - if old_db is None: - continue - - only_old = set(old_db) - set(new_db) - only_new = set(new_db) - set(old_db) - changed = set(key for 
key in (set(old_db) & set(new_db)) if old_db[key] != new_db[key]) - - if only_old or changed or only_new: - print "Comparing", old_db.name, "with", new_db.name - for key in sorted(only_old): - show(old = old_db[key]) - for key in sorted(changed): - show(old = old_db[key], new = new_db[key]) - for key in sorted(only_new): - show(new = new_db[key]) - print + old_db = new_db + new_db = Session(arg) + + if old_db is None: + continue + + only_old = set(old_db) - set(new_db) + only_new = set(new_db) - set(old_db) + changed = set(key for key in (set(old_db) & set(new_db)) if old_db[key] != new_db[key]) + + if only_old or changed or only_new: + print "Comparing", old_db.name, "with", new_db.name + for key in sorted(only_old): + show(old = old_db[key]) + for key in sorted(changed): + show(old = old_db[key], new = new_db[key]) + for key in sorted(only_new): + show(new = new_db[key]) + print diff --git a/potpourri/ripe-asns-to-csv.py b/potpourri/ripe-asns-to-csv.py index 50251ce8..0c85b901 100644 --- a/potpourri/ripe-asns-to-csv.py +++ b/potpourri/ripe-asns-to-csv.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -28,81 +28,81 @@ from rpki.csv_utils import csv_writer class Handle(dict): - want_tags = () + want_tags = () - debug = False + debug = False - def set(self, tag, val): - if tag in self.want_tags: - self[tag] = "".join(val.split(" ")) + def set(self, tag, val): + if tag in self.want_tags: + self[tag] = "".join(val.split(" ")) - def check(self): - for tag in self.want_tags: - if not tag in self: - return False - if self.debug: - self.log() - return True + def check(self): + for tag in self.want_tags: + if not tag in self: + return False + if self.debug: + self.log() + return True - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, - " ".join("%s:%s" % (tag, self.get(tag, "?")) - for tag in self.want_tags)) + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, + " ".join("%s:%s" % (tag, self.get(tag, "?")) + for tag in self.want_tags)) - def log(self): - print repr(self) + def log(self): + print repr(self) - def finish(self, ctx): - self.check() + def finish(self, ctx): + self.check() class aut_num(Handle): - want_tags = ("aut-num", "mnt-by", "as-name") + want_tags = ("aut-num", "mnt-by", "as-name") - def set(self, tag, val): - if tag == "aut-num" and val.startswith("AS"): - val = val[2:] - Handle.set(self, tag, val) + def set(self, tag, val): + if tag == "aut-num" and val.startswith("AS"): + val = val[2:] + Handle.set(self, tag, val) - def finish(self, ctx): - if self.check(): - ctx.asns.writerow((self["mnt-by"], self["aut-num"])) + def finish(self, ctx): + if self.check(): + ctx.asns.writerow((self["mnt-by"], self["aut-num"])) class main(object): - types = dict((x.want_tags[0], x) for x in (aut_num,)) - - - def finish_statement(self, done): - if self.statement: - tag, sep, val = self.statement.partition(":") - assert sep, "Couldn't find separator in %r" % self.statement - tag = tag.strip().lower() - val = val.strip().upper() - if self.cur is None: - self.cur = 
self.types[tag]() if tag in self.types else False - if self.cur is not False: - self.cur.set(tag, val) - if done and self.cur: - self.cur.finish(self) - self.cur = None - - filenames = ("ripe.db.aut-num.gz",) - - def __init__(self): - self.asns = csv_writer("asns.csv") - for fn in self.filenames: - f = gzip.open(fn) - self.statement = "" - self.cur = None - for line in f: - line = line.expandtabs().partition("#")[0].rstrip("\n") - if line and not line[0].isalpha(): - self.statement += line[1:] if line[0] == "+" else line - else: - self.finish_statement(not line) - self.statement = line - self.finish_statement(True) - f.close() - self.asns.close() + types = dict((x.want_tags[0], x) for x in (aut_num,)) + + + def finish_statement(self, done): + if self.statement: + tag, sep, val = self.statement.partition(":") + assert sep, "Couldn't find separator in %r" % self.statement + tag = tag.strip().lower() + val = val.strip().upper() + if self.cur is None: + self.cur = self.types[tag]() if tag in self.types else False + if self.cur is not False: + self.cur.set(tag, val) + if done and self.cur: + self.cur.finish(self) + self.cur = None + + filenames = ("ripe.db.aut-num.gz",) + + def __init__(self): + self.asns = csv_writer("asns.csv") + for fn in self.filenames: + f = gzip.open(fn) + self.statement = "" + self.cur = None + for line in f: + line = line.expandtabs().partition("#")[0].rstrip("\n") + if line and not line[0].isalpha(): + self.statement += line[1:] if line[0] == "+" else line + else: + self.finish_statement(not line) + self.statement = line + self.finish_statement(True) + f.close() + self.asns.close() main() diff --git a/potpourri/ripe-to-csv.py b/potpourri/ripe-to-csv.py index b864345b..308917ce 100644 --- a/potpourri/ripe-to-csv.py +++ b/potpourri/ripe-to-csv.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -41,98 +41,98 @@ from rpki.csv_utils import csv_writer class Handle(dict): - want_tags = () + want_tags = () - want_status = ("ASSIGNED", "ASSIGNEDPA", "ASSIGNEDPI") + want_status = ("ASSIGNED", "ASSIGNEDPA", "ASSIGNEDPI") - debug = False + debug = False - def set(self, tag, val): - if tag in self.want_tags: - self[tag] = "".join(val.split(" ")) + def set(self, tag, val): + if tag in self.want_tags: + self[tag] = "".join(val.split(" ")) - def check(self): - for tag in self.want_tags: - if not tag in self: - return False - if self.debug: - self.log() - return True + def check(self): + for tag in self.want_tags: + if not tag in self: + return False + if self.debug: + self.log() + return True - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, - " ".join("%s:%s" % (tag, self.get(tag, "?")) - for tag in self.want_tags)) + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, + " ".join("%s:%s" % (tag, self.get(tag, "?")) + for tag in self.want_tags)) - def log(self): - print repr(self) + def log(self): + print repr(self) - def finish(self, ctx): - self.check() + def finish(self, ctx): + self.check() class aut_num(Handle): - want_tags = ("aut-num", "mnt-by") # "as-name" + want_tags = ("aut-num", "mnt-by") # "as-name" - def set(self, tag, val): - if tag == "aut-num" and val.startswith("AS"): - val = val[2:] - Handle.set(self, tag, val) + def set(self, tag, val): + if tag == "aut-num" and val.startswith("AS"): + val = val[2:] + Handle.set(self, tag, val) - def finish(self, ctx): - if self.check(): - ctx.asns.writerow((self["mnt-by"], self["aut-num"])) + def finish(self, ctx): + if self.check(): + ctx.asns.writerow((self["mnt-by"], self["aut-num"])) class inetnum(Handle): - want_tags = ("inetnum", "netname", "status") # "mnt-by" - - def finish(self, ctx): - if self.check() and self["status"] in self.want_status: - ctx.prefixes.writerow((self["netname"], self["inetnum"])) + want_tags = ("inetnum", "netname", "status") # "mnt-by" + + def finish(self, ctx): + if self.check() and self["status"] in self.want_status: + ctx.prefixes.writerow((self["netname"], self["inetnum"])) class inet6num(Handle): - want_tags = ("inet6num", "netname", "status") # "mnt-by" + want_tags = ("inet6num", "netname", "status") # "mnt-by" - def finish(self, ctx): - if self.check() and self["status"] in self.want_status: - ctx.prefixes.writerow((self["netname"], self["inet6num"])) + def finish(self, ctx): + if self.check() and self["status"] in self.want_status: + ctx.prefixes.writerow((self["netname"], self["inet6num"])) class main(object): - types = dict((x.want_tags[0], x) for x in (aut_num, inetnum, inet6num)) - - def finish_statement(self, done): - if self.statement: - tag, sep, val = self.statement.partition(":") - assert sep, "Couldn't find separator in %r" % self.statement - tag = tag.strip().lower() - val = val.strip().upper() - if self.cur is None: - self.cur = self.types[tag]() if tag in self.types else False - if self.cur is not False: - self.cur.set(tag, val) - if done and self.cur: - self.cur.finish(self) - self.cur = None - - filenames = ("ripe.db.aut-num.gz", "ripe.db.inet6num.gz", "ripe.db.inetnum.gz") - - def __init__(self): - self.asns = csv_writer("asns.csv") - self.prefixes = csv_writer("prefixes.csv") - for fn in self.filenames: - f = gzip.open(fn) - self.statement = "" - self.cur = None - for line in f: - line = line.expandtabs().partition("#")[0].rstrip("\n") - if line and not line[0].isalpha(): - self.statement += 
line[1:] if line[0] == "+" else line - else: - self.finish_statement(not line) - self.statement = line - self.finish_statement(True) - f.close() - self.asns.close() - self.prefixes.close() + types = dict((x.want_tags[0], x) for x in (aut_num, inetnum, inet6num)) + + def finish_statement(self, done): + if self.statement: + tag, sep, val = self.statement.partition(":") + assert sep, "Couldn't find separator in %r" % self.statement + tag = tag.strip().lower() + val = val.strip().upper() + if self.cur is None: + self.cur = self.types[tag]() if tag in self.types else False + if self.cur is not False: + self.cur.set(tag, val) + if done and self.cur: + self.cur.finish(self) + self.cur = None + + filenames = ("ripe.db.aut-num.gz", "ripe.db.inet6num.gz", "ripe.db.inetnum.gz") + + def __init__(self): + self.asns = csv_writer("asns.csv") + self.prefixes = csv_writer("prefixes.csv") + for fn in self.filenames: + f = gzip.open(fn) + self.statement = "" + self.cur = None + for line in f: + line = line.expandtabs().partition("#")[0].rstrip("\n") + if line and not line[0].isalpha(): + self.statement += line[1:] if line[0] == "+" else line + else: + self.finish_statement(not line) + self.statement = line + self.finish_statement(True) + f.close() + self.asns.close() + self.prefixes.close() main() diff --git a/potpourri/roa-to-irr.py b/potpourri/roa-to-irr.py index 500596f8..748f37fa 100644 --- a/potpourri/roa-to-irr.py +++ b/potpourri/roa-to-irr.py @@ -1,12 +1,12 @@ # $Id$ -# +# # Copyright (C) 2014 Dragon Research Labs ("DRL") # Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notices and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL # WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR @@ -34,128 +34,128 @@ from time import time, strftime, gmtime, asctime args = None class route(object): - """ - Interesting parts of a route object. - """ - - def __init__(self, label, uri, asnum, date, prefix, prefixlen, max_prefixlen): - self.label = label - self.uri = uri - self.asn = asnum - self.date = date - self.prefix = prefix - self.prefixlen = prefixlen - self.max_prefixlen = self.prefixlen if max_prefixlen is None else max_prefixlen - - def __cmp__(self, other): - result = cmp(self.asn, other.asn) - if result == 0: - result = cmp(self.prefix, other.prefix) - if result == 0: - result = cmp(self.prefixlen, other.prefixlen) - if result == 0: - result = cmp(self.max_prefixlen, other.max_prefixlen) - if result == 0: - result = cmp(self.date, other.date) - return result - - def __str__(self): - lines = "\n" if args.email else "" - lines += dedent('''\ - {self.label:<14s}{self.prefix}/{self.prefixlen} - descr: {self.prefix}/{self.prefixlen}-{self.max_prefixlen} - origin: AS{self.asn:d} - notify: {args.notify} - mnt-by: {args.mnt_by} - changed: {args.changed_by} {self.date} - source: {args.source} - ''').format(self = self, args = args) - if args.password is not None: - lines += "override: {}\n".format(args.password) - return lines - - def write(self, output_directory): - name = "{0.prefix}-{0.prefixlen}-{0.max_prefixlen}-AS{0.asn:d}-{0.date}".format(self) - with open(os.path.join(output_directory, name), "w") as f: - f.write(str(self)) + """ + Interesting parts of a route object. 
+ """ + + def __init__(self, label, uri, asnum, date, prefix, prefixlen, max_prefixlen): + self.label = label + self.uri = uri + self.asn = asnum + self.date = date + self.prefix = prefix + self.prefixlen = prefixlen + self.max_prefixlen = self.prefixlen if max_prefixlen is None else max_prefixlen + + def __cmp__(self, other): + result = cmp(self.asn, other.asn) + if result == 0: + result = cmp(self.prefix, other.prefix) + if result == 0: + result = cmp(self.prefixlen, other.prefixlen) + if result == 0: + result = cmp(self.max_prefixlen, other.max_prefixlen) + if result == 0: + result = cmp(self.date, other.date) + return result + + def __str__(self): + lines = "\n" if args.email else "" + lines += dedent('''\ + {self.label:<14s}{self.prefix}/{self.prefixlen} + descr: {self.prefix}/{self.prefixlen}-{self.max_prefixlen} + origin: AS{self.asn:d} + notify: {args.notify} + mnt-by: {args.mnt_by} + changed: {args.changed_by} {self.date} + source: {args.source} + ''').format(self = self, args = args) + if args.password is not None: + lines += "override: {}\n".format(args.password) + return lines + + def write(self, output_directory): + name = "{0.prefix}-{0.prefixlen}-{0.max_prefixlen}-AS{0.asn:d}-{0.date}".format(self) + with open(os.path.join(output_directory, name), "w") as f: + f.write(str(self)) class route_list(list): - """ - A list of route objects. - """ - - def __init__(self, rcynic_dir): - for root, dirs, files in os.walk(rcynic_dir): - for f in files: - if f.endswith(".roa"): - path = os.path.join(root, f) - uri = "rsync://" + path[len(rcynic_dir):].lstrip("/") - roa = rpki.x509.ROA(DER_file = path) - roa.extract() - assert roa.get_POW().getVersion() == 0, "ROA version is {:d}, expected 0".format(roa.get_POW().getVersion()) - asnum = roa.get_POW().getASID() - notBefore = roa.get_POW().certs()[0].getNotBefore().strftime("%Y%m%d") - v4, v6 = roa.get_POW().getPrefixes() - if v4 is not None: - for prefix, prefixlen, max_prefixlen in v4: - self.append(route("route:", uri, asnum, notBefore, prefix, prefixlen, max_prefixlen)) - if v6 is not None: - for prefix, prefixlen, max_prefixlen in v6: - self.append(route("route6:", uri, asnum, notBefore, prefix, prefixlen, max_prefixlen)) - self.sort() - for i in xrange(len(self) - 2, -1, -1): - if self[i] == self[i + 1]: - del self[i + 1] + """ + A list of route objects. 
+ """ + + def __init__(self, rcynic_dir): + for root, dirs, files in os.walk(rcynic_dir): + for f in files: + if f.endswith(".roa"): + path = os.path.join(root, f) + uri = "rsync://" + path[len(rcynic_dir):].lstrip("/") + roa = rpki.x509.ROA(DER_file = path) + roa.extract() + assert roa.get_POW().getVersion() == 0, "ROA version is {:d}, expected 0".format(roa.get_POW().getVersion()) + asnum = roa.get_POW().getASID() + notBefore = roa.get_POW().certs()[0].getNotBefore().strftime("%Y%m%d") + v4, v6 = roa.get_POW().getPrefixes() + if v4 is not None: + for prefix, prefixlen, max_prefixlen in v4: + self.append(route("route:", uri, asnum, notBefore, prefix, prefixlen, max_prefixlen)) + if v6 is not None: + for prefix, prefixlen, max_prefixlen in v6: + self.append(route("route6:", uri, asnum, notBefore, prefix, prefixlen, max_prefixlen)) + self.sort() + for i in xrange(len(self) - 2, -1, -1): + if self[i] == self[i + 1]: + del self[i + 1] def email_header(f): - if args.email: - now = time() - f.write(dedent('''\ - From {from_} {ctime} - Date: {date} - From: {from_} - Subject: Fake email header to make irr_rpsl_submit happy - Message-Id: <{pid}.{seconds}@{hostname}> - ''').format(from_ = args.from_, - ctime = asctime(gmtime(now)), - date = strftime("%d %b %Y %T %z", gmtime(now)), - pid = os.getpid(), - seconds = now, - hostname = gethostname())) + if args.email: + now = time() + f.write(dedent('''\ + From {from_} {ctime} + Date: {date} + From: {from_} + Subject: Fake email header to make irr_rpsl_submit happy + Message-Id: <{pid}.{seconds}@{hostname}> + ''').format(from_ = args.from_, + ctime = asctime(gmtime(now)), + date = strftime("%d %b %Y %T %z", gmtime(now)), + pid = os.getpid(), + seconds = now, + hostname = gethostname())) def main(): - global args - whoami = "{}@{}".format(os.getlogin(), gethostname()) - - parser = argparse.ArgumentParser(description = __doc__) - parser.add_argument("-c", "--changed_by", default = whoami, help = "override \"changed:\" value") - parser.add_argument("-f", "--from", dest="from_", default = whoami, help = "override \"from:\" header when using --email") - parser.add_argument("-m", "--mnt_by", default = "MAINT-RPKI", help = "override \"mnt-by:\" value") - parser.add_argument("-n", "--notify", default = whoami, help = "override \"notify:\" value") - parser.add_argument("-p", "--password", help = "specify \"override:\" password") - parser.add_argument("-s", "--source", default = "RPKI", help = "override \"source:\" value") - group = parser.add_mutually_exclusive_group() - group.add_argument("-e", "--email", action = "store_true", help = "generate fake RFC 822 header suitable for piping to irr_rpsl_submit") - group.add_argument("-d", "--output-directory", help = "write route and route6 objects to directory OUTPUT, one object per file") - parser.add_argument("authenticated_directory", help = "directory tree containing authenticated rcynic output") - args = parser.parse_args() - - if not os.path.isdir(args.authenticated_directory): - sys.exit('"{}" is not a directory'.format(args.authenticated_directory)) - - routes = route_list(args.authenticated_directory) - - if args.output_directory: - if not os.path.isdir(args.output_directory): - os.makedirs(args.output_directory) - for r in routes: - r.write(args.output_directory) - else: - email_header(sys.stdout) - for r in routes: - sys.stdout.write(str(r)) + global args + whoami = "{}@{}".format(os.getlogin(), gethostname()) + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", 
"--changed_by", default = whoami, help = "override \"changed:\" value") + parser.add_argument("-f", "--from", dest="from_", default = whoami, help = "override \"from:\" header when using --email") + parser.add_argument("-m", "--mnt_by", default = "MAINT-RPKI", help = "override \"mnt-by:\" value") + parser.add_argument("-n", "--notify", default = whoami, help = "override \"notify:\" value") + parser.add_argument("-p", "--password", help = "specify \"override:\" password") + parser.add_argument("-s", "--source", default = "RPKI", help = "override \"source:\" value") + group = parser.add_mutually_exclusive_group() + group.add_argument("-e", "--email", action = "store_true", help = "generate fake RFC 822 header suitable for piping to irr_rpsl_submit") + group.add_argument("-d", "--output-directory", help = "write route and route6 objects to directory OUTPUT, one object per file") + parser.add_argument("authenticated_directory", help = "directory tree containing authenticated rcynic output") + args = parser.parse_args() + + if not os.path.isdir(args.authenticated_directory): + sys.exit('"{}" is not a directory'.format(args.authenticated_directory)) + + routes = route_list(args.authenticated_directory) + + if args.output_directory: + if not os.path.isdir(args.output_directory): + os.makedirs(args.output_directory) + for r in routes: + r.write(args.output_directory) + else: + email_header(sys.stdout) + for r in routes: + sys.stdout.write(str(r)) if __name__ == "__main__": - main() + main() diff --git a/potpourri/rrd-rcynic-history.py b/potpourri/rrd-rcynic-history.py index 8a0d50a8..45aec6c5 100644 --- a/potpourri/rrd-rcynic-history.py +++ b/potpourri/rrd-rcynic-history.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -39,159 +39,159 @@ os.putenv("TZ", "UTC") time.tzset() def parse_utc(s): - return int(time.mktime(time.strptime(s, "%Y-%m-%dT%H:%M:%SZ"))) + return int(time.mktime(time.strptime(s, "%Y-%m-%dT%H:%M:%SZ"))) class Host(object): - """ - A host object represents all the data collected for one host for a given session. 
- """ - - def __init__(self, hostname, timestamp): - self.hostname = hostname - self.timestamp = timestamp - self.elapsed = 0 - self.connections = 0 - self.failures = 0 - self.uris = set() - - def add_connection(self, elt): - self.elapsed += parse_utc(elt.get("finished")) - parse_utc(elt.get("started")) - self.connections += 1 - if elt.get("error") is not None: - self.failures += 1 - - def add_object_uri(self, u): - self.uris.add(u) - - @property - def failed(self): - return 1 if self.failures > 0 else 0 - - @property - def objects(self): - return len(self.uris) - - field_table = (("timestamp", None, None, None), - ("connections", "GAUGE", "Connections", "FF0000"), - ("objects", "GAUGE", "Objects", "00FF00"), - ("elapsed", "GAUGE", "Fetch Time", "0000FF"), - ("failed", "ABSOLUTE", "Failed", "00FFFF")) - - @property - def field_values(self): - return tuple(str(getattr(self, field[0])) for field in self.field_table) - - @classmethod - def field_ds_specifiers(cls, heartbeat = 24 * 60 * 60, minimum = 0, maximum = "U"): - return ["DS:%s:%s:%s:%s:%s" % (field[0], field[1], heartbeat, minimum, maximum) - for field in cls.field_table if field[1] is not None] - - @classmethod - def field_graph_specifiers(cls, hostname): - result = [] - for field in cls.field_table: - if field[1] is not None: - result.append("DEF:%s=%s.rrd:%s:AVERAGE" % (field[0], hostname, field[0])) - result.append("'LINE1:%s#%s:%s'" % (field[0], field[3], field[2])) - return result - - def save(self, rrdtable): - rrdtable.add(self.hostname, self.field_values) + """ + A host object represents all the data collected for one host for a given session. + """ + + def __init__(self, hostname, timestamp): + self.hostname = hostname + self.timestamp = timestamp + self.elapsed = 0 + self.connections = 0 + self.failures = 0 + self.uris = set() + + def add_connection(self, elt): + self.elapsed += parse_utc(elt.get("finished")) - parse_utc(elt.get("started")) + self.connections += 1 + if elt.get("error") is not None: + self.failures += 1 + + def add_object_uri(self, u): + self.uris.add(u) + + @property + def failed(self): + return 1 if self.failures > 0 else 0 + + @property + def objects(self): + return len(self.uris) + + field_table = (("timestamp", None, None, None), + ("connections", "GAUGE", "Connections", "FF0000"), + ("objects", "GAUGE", "Objects", "00FF00"), + ("elapsed", "GAUGE", "Fetch Time", "0000FF"), + ("failed", "ABSOLUTE", "Failed", "00FFFF")) + + @property + def field_values(self): + return tuple(str(getattr(self, field[0])) for field in self.field_table) + + @classmethod + def field_ds_specifiers(cls, heartbeat = 24 * 60 * 60, minimum = 0, maximum = "U"): + return ["DS:%s:%s:%s:%s:%s" % (field[0], field[1], heartbeat, minimum, maximum) + for field in cls.field_table if field[1] is not None] + + @classmethod + def field_graph_specifiers(cls, hostname): + result = [] + for field in cls.field_table: + if field[1] is not None: + result.append("DEF:%s=%s.rrd:%s:AVERAGE" % (field[0], hostname, field[0])) + result.append("'LINE1:%s#%s:%s'" % (field[0], field[3], field[2])) + return result + + def save(self, rrdtable): + rrdtable.add(self.hostname, self.field_values) class Session(dict): - """ - A session corresponds to one XML file. This is a dictionary of Host - objects, keyed by hostname. 
- """ - - def __init__(self, timestamp): - dict.__init__(self) - self.timestamp = timestamp - - @property - def hostnames(self): - return set(self.iterkeys()) - - def add_connection(self, elt): - hostname = urlparse.urlparse(elt.text.strip()).hostname - if hostname not in self: - self[hostname] = Host(hostname, self.timestamp) - self[hostname].add_connection(elt) - - def add_object_uri(self, u): - h = urlparse.urlparse(u).hostname - if h and h in self: - self[h].add_object_uri(u) - - def save(self, rrdtable): - for h in self.itervalues(): - h.save(rrdtable) + """ + A session corresponds to one XML file. This is a dictionary of Host + objects, keyed by hostname. + """ + + def __init__(self, timestamp): + dict.__init__(self) + self.timestamp = timestamp + + @property + def hostnames(self): + return set(self.iterkeys()) + + def add_connection(self, elt): + hostname = urlparse.urlparse(elt.text.strip()).hostname + if hostname not in self: + self[hostname] = Host(hostname, self.timestamp) + self[hostname].add_connection(elt) + + def add_object_uri(self, u): + h = urlparse.urlparse(u).hostname + if h and h in self: + self[h].add_object_uri(u) + + def save(self, rrdtable): + for h in self.itervalues(): + h.save(rrdtable) class RRDTable(dict): - """ - Final data we're going to be sending to rrdtool. We need to buffer - it until we're done because we have to sort it. Might be easier - just to sort the maildir, then again it might be easier to get rid - of the maildir too once we're dealing with current data. We'll see. - """ - - def __init__(self, rrdtool = sys.stdout): - dict.__init__(self) - self.rrdtool = rrdtool - - def add(self, hostname, data): - if hostname not in self: - self[hostname] = [] - self[hostname].append(data) - - def sort(self): - for data in self.itervalues(): - data.sort() - - @property - def oldest(self): - return min(min(datum[0] for datum in data) for data in self.itervalues()) - - rras = tuple("RRA:AVERAGE:0.5:%s:9600" % steps for steps in (1, 4, 24)) - - def create(self): - start = self.oldest - ds_list = Host.field_ds_specifiers() - ds_list.extend(self.rras) - for hostname in self: - if not os.path.exists("%s.rrd" % hostname): - self.rrdtool("create %s.rrd --start %s --step 3600 %s\n" % (hostname, start, " ".join(ds_list))) - - def update(self): - for hostname, data in self.iteritems(): - for datum in data: - self.rrdtool("update %s.rrd %s\n" % (hostname, ":".join(str(d) for d in datum))) - - def graph(self): - for hostname in self: - self.rrdtool("graph %s.png --start -90d %s\n" % (hostname, " ".join(Host.field_graph_specifiers(hostname)))) + """ + Final data we're going to be sending to rrdtool. We need to buffer + it until we're done because we have to sort it. Might be easier + just to sort the maildir, then again it might be easier to get rid + of the maildir too once we're dealing with current data. We'll see. 
+ """ + + def __init__(self, rrdtool = sys.stdout): + dict.__init__(self) + self.rrdtool = rrdtool + + def add(self, hostname, data): + if hostname not in self: + self[hostname] = [] + self[hostname].append(data) + + def sort(self): + for data in self.itervalues(): + data.sort() + + @property + def oldest(self): + return min(min(datum[0] for datum in data) for data in self.itervalues()) + + rras = tuple("RRA:AVERAGE:0.5:%s:9600" % steps for steps in (1, 4, 24)) + + def create(self): + start = self.oldest + ds_list = Host.field_ds_specifiers() + ds_list.extend(self.rras) + for hostname in self: + if not os.path.exists("%s.rrd" % hostname): + self.rrdtool("create %s.rrd --start %s --step 3600 %s\n" % (hostname, start, " ".join(ds_list))) + + def update(self): + for hostname, data in self.iteritems(): + for datum in data: + self.rrdtool("update %s.rrd %s\n" % (hostname, ":".join(str(d) for d in datum))) + + def graph(self): + for hostname in self: + self.rrdtool("graph %s.png --start -90d %s\n" % (hostname, " ".join(Host.field_graph_specifiers(hostname)))) mb = mailbox.Maildir("/u/sra/rpki/rcynic-xml", factory = None, create = False) rrdtable = RRDTable() for i, key in enumerate(mb.iterkeys(), 1): - sys.stderr.write("\r%s %d/%d..." % ("|\\-/"[i & 3], i, len(mb))) - - assert not mb[key].is_multipart() - input = ElementTreeFromString(mb[key].get_payload()) - date = input.get("date") - sys.stderr.write("%s..." % date) - session = Session(parse_utc(date)) - for elt in input.findall("rsync_history"): - session.add_connection(elt) - for elt in input.findall("validation_status"): - if elt.get("generation") == "current": - session.add_object_uri(elt.text.strip()) - session.save(rrdtable) - - # XXX - #if i > 4: break + sys.stderr.write("\r%s %d/%d..." % ("|\\-/"[i & 3], i, len(mb))) + + assert not mb[key].is_multipart() + input = ElementTreeFromString(mb[key].get_payload()) + date = input.get("date") + sys.stderr.write("%s..." % date) + session = Session(parse_utc(date)) + for elt in input.findall("rsync_history"): + session.add_connection(elt) + for elt in input.findall("validation_status"): + if elt.get("generation") == "current": + session.add_object_uri(elt.text.strip()) + session.save(rrdtable) + + # XXX + #if i > 4: break sys.stderr.write("\n") diff --git a/potpourri/rrdp-fetch-from-tal b/potpourri/rrdp-fetch-from-tal new file mode 100755 index 00000000..08d245dd --- /dev/null +++ b/potpourri/rrdp-fetch-from-tal @@ -0,0 +1,229 @@ +#!/usr/bin/env python +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Fetch RPKI data using RRDP starting from a TAL. + +Work in progress, don't be too surprised by anything this does or +doesn't do. 
+""" + +import rpki.relaxng +import rpki.x509 +import lxml.etree +import argparse +import urlparse +import urllib2 +import sys +import os + + +class Tags(object): + def __init__(self, *tags): + for tag in tags: + setattr(self, tag, rpki.relaxng.rrdp.xmlns + tag) + +tags = Tags("notification", "delta", "snapshot", "publish", "withdraw") + + +class RSyncHandler(urllib2.BaseHandler): + """ + Jam support for rsync:// URIs into urllib2 framework. + Very basic, probably not paranoid enough. + """ + + _n = 0 + + def rsync_open(self, req): + import subprocess, mimetools + u = req.get_full_url() + if u.endswith("/"): + raise urllib2.URLError("rsync directory URI not allowed") + t = "/tmp/rrdp-fetch-from-tal.%d.%d" % (os.getpid(), self._n) + self._n += 1 + subprocess.check_call(("rsync", u, t)) + h = mimetools.Message(open("/dev/null")) + h["Content-type"] = "text/plain" + h["Content-length"] = str(os.stat(t).st_size) + f = open(t, "rb") + os.unlink(t) + return urllib2.addinfourl(f, h, u) + +urllib2.install_opener(urllib2.build_opener(RSyncHandler)) + + +class main(object): + + def __init__(self): + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("--rcynic-tree", default = "rcynic-data/unauthenticated", + help = "directory tree in which to write extracted RPKI objects") + parser.add_argument("--serial-filename", # default handled later + help = "file name in which to store RRDP serial number") + parser.add_argument("tal", help = "trust anchor locator") + self.args = parser.parse_args() + if not os.path.isdir(self.args.rcynic_tree): + os.makedirs(self.args.rcynic_tree) + self.urls = set() + self.ta = self.ta_fetch() + url = self.ta.get_sia_rrdp_notify() + if url is None: + sys.exit("Couldn't get RRDP URI from trust anchor") + self.rrdp_fetch(url) + self.write_ta() + + def rrdp_fetch(self, url): + if url in self.urls: + print "Already fetched %s, skipping" % url + return + self.urls.add(url) + xml = lxml.etree.ElementTree(file = urllib2.urlopen(url)).getroot() + rpki.relaxng.rrdp.assertValid(xml) + if xml.tag[len(rpki.relaxng.rrdp.xmlns):] != "notification": + sys.exit("Expected notification at %s, found %s" % (url, xml.tag)) + self.prettyprint_notification(xml) + + # We should be checking session_id here, but we're not storing it yet + + old_serial = self.get_serial() + new_serial = int(xml.get("serial")) + deltas = dict((int(elt.get("serial")), elt) + for elt in xml.iterchildren(tags.delta)) + if old_serial == 0 or not all(serial + 1 in deltas + for serial in xrange(old_serial, new_serial)): + return self.snapshot_fetch(xml.iterchildren(tags.snapshot).next()) + for serial in sorted(deltas): + if serial > old_serial: + self.delta_fetch(deltas[serial]) + + def prettyprint_notification(self, xml): + print "Notification version %s session %s serial %s" % ( + xml.get("version"), xml.get("session_id"), xml.get("serial")) + elt = xml.iterchildren(tags.snapshot).next() + print " Snapshot URI %s hash %s" % ( + elt.get("uri"), elt.get("hash")) + for elt in xml.iterchildren(tags.delta): + print " Delta %6s URI %s hash %s" % ( + elt.get("serial"), elt.get("uri"), elt.get("hash")) + + def ta_fetch(self): + with open(self.args.tal, "r") as f: + tal = f.read() + uris, key = tal.split("\n\n", 2) + key = rpki.x509.PublicKey(Base64 = key) + for uri in uris.split(): + ta = rpki.x509.X509(DER = urllib2.urlopen(uri).read()) + if ta.getPublicKey() == key: + return ta + print "TAL key mismatch for certificate", url + sys.exit("Could not fetch trust anchor") + + @property + def 
serial_filename(self): + return self.args.serial_filename or os.path.join(self.args.rcynic_tree, "serial") + + def get_serial(self): + try: + with open(self.serial_filename, "r") as f: + return int(f.read().strip()) + except: + return 0 + + def set_serial(self, value): + with open(self.serial_filename, "w") as f: + f.write("%s\n" % value) + + def uri_to_filename(self, uri): + assert uri.startswith("rsync://") + return os.path.join(self.args.rcynic_tree, uri[len("rsync://"):]) + + def add_obj(self, uri, obj): + fn = self.uri_to_filename(uri) + dn = os.path.dirname(fn) + if not os.path.isdir(dn): + os.makedirs(dn) + with open(fn, "wb") as f: + f.write(obj) + + def del_obj(self, uri, hash): + fn = self.uri_to_filename(uri) + with open(fn, "rb") as f: + if hash.lower() != rpki.x509.sha256(f.read()).encode("hex"): + raise RuntimeError("Hash mismatch for URI %s" % uri) + os.unlink(fn) + dn = os.path.dirname(fn) + while True: + try: + os.rmdir(dn) + except OSError: + break + else: + dn = os.path.dirname(dn) + + def xml_fetch(self, elt): + url = elt.get("uri") + hash = elt.get("hash").lower() + print "Fetching", url + text = urllib2.urlopen(url).read() + h = rpki.x509.sha256(text).encode("hex") + if h != hash: + sys.exit("Bad hash for %s: expected %s got %s" % (url, hash, h)) + xml = lxml.etree.XML(text) + rpki.relaxng.rrdp.schema.assertValid(xml) + return xml + + def snapshot_fetch(self, xml): + xml = self.xml_fetch(xml) + print "Unpacking snapshot version %s session %s serial %6s" % ( + xml.get("version"), xml.get("session_id"), xml.get("serial")) + for elt in xml.iterchildren(tags.publish): + print " ", elt.get("uri") + self.add_obj(elt.get("uri"), elt.text.decode("base64")) + self.set_serial(xml.get("serial")) + + def delta_fetch(self, xml): + xml = self.xml_fetch(xml) + old_serial = int(self.get_serial()) + new_serial = int(xml.get("serial")) + print "Unpacking deltas version %s session %s serial %s" % ( + xml.get("version"), xml.get("session_id"), new_serial) + if old_serial != new_serial - 1: + raise RuntimeError("Can't apply deltas: old serial %s new serial %s" % (old_serial, new_serial)) + for i, elt in enumerate(xml.iterchildren(tags.withdraw)): + uri = elt.get("uri") + hash = elt.get("hash") + print " %3d withdraw URI %s hash %s" % (i, uri, hash) + self.del_obj(uri, hash) + for i, elt in enumerate(xml.iterchildren(tags.publish)): + uri = elt.get("uri") + hash = elt.get("hash", None) + print " %3d publish URI %s hash %s" % (i, uri, hash) + if hash is not None: + self.del_obj(uri, hash) + self.add_obj(elt.get("uri"), elt.text.decode("base64")) + self.set_serial(new_serial) + + def write_ta(self): + der = self.ta.get_DER() + fn = rpki.x509.sha256(der).encode("hex") + ".cer" + if not os.path.exists(fn): + print "Writing", fn + with open(fn, "wb") as f: + f.write(der) + +if __name__ == "__main__": + main() diff --git a/potpourri/rrdp-fetch.py b/potpourri/rrdp-fetch.py new file mode 100755 index 00000000..b8d927ee --- /dev/null +++ b/potpourri/rrdp-fetch.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Fetch an RRDP notifcation file and follow all the links. Should be +merged into rrdp-test-tool eventually, but one thing at a time. +""" + +from urllib2 import urlopen +from lxml.etree import ElementTree, XML +from socket import getfqdn +from rpki.x509 import sha256 +from rpki.relaxng import rrdp +from urlparse import urlparse +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + +class BadHash(Exception): + "Calculated hash value doesn't match expected hash value." + +def fetch(elt): + uri = elt.get("uri") + hash = elt.get("hash").lower() + print "Fetching", uri + + text = urlopen(uri).read() + h = sha256(text).encode("hex") + if h != hash: + raise BadHash("Bad hash for %s: expected %s got %s" % (uri, hash, h)) + + xml = XML(text) + rrdp.schema.assertValid(xml) + + u = urlparse(uri) + fn = u.netloc + u.path + + return elt, xml, fn + +parser = ArgumentParser(description = __doc__, formatter_class = ArgumentDefaultsHelpFormatter) +parser.add_argument("uri", nargs = "?", + default = "http://" + getfqdn() + "/rrdp/updates.xml", + help = "RRDP notification file to fetch") +args = parser.parse_args() + +updates = ElementTree(file = urlopen(args.uri)) +rrdp.schema.assertValid(updates) + +snapshot = fetch(updates.find(rrdp.xmlns + "snapshot")) + +deltas = [fetch(elt) for elt in updates.findall(rrdp.xmlns + "delta")] + +print updates +print snapshot +for delta in deltas: + print delta diff --git a/potpourri/rrdp-test-tool b/potpourri/rrdp-test-tool new file mode 100755 index 00000000..ccf17960 --- /dev/null +++ b/potpourri/rrdp-test-tool @@ -0,0 +1,135 @@ +#!/usr/bin/env python +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Test tool for prototype RRDP implementation. Eventually some of this +code will likely be refactored into more user-friendly form, but for +the moment this just does whatever insane thing I need to do this week +for testing. 
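Both rrdp-fetch-from-tal above and the rrdp-test-tool script below store fetched objects using the same rsync-URI-to-path mapping. Pulled out on its own (the URI and tree below are made up):

    import os

    def uri_to_filename(rcynic_tree, uri):
        # Same mapping as the uri_to_filename() methods in the two RRDP tools.
        assert uri.startswith("rsync://")
        return os.path.join(rcynic_tree, uri[len("rsync://"):])

    print uri_to_filename("rcynic-data/unauthenticated",
                          "rsync://repo.example.net/rpki/ca1/alice.cer")
    # rcynic-data/unauthenticated/repo.example.net/rpki/ca1/alice.cer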
+""" + +import rpki.relaxng +import rpki.x509 +import lxml.etree +import argparse +import os + +class Tags(object): + def __init__(self, *tags): + for tag in tags: + setattr(self, tag, rpki.relaxng.rrdp.xmlns + tag) + +tags = Tags("notification", "delta", "snapshot", "publish", "withdraw") + +class main(object): + + def __init__(self): + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("--rcynic-tree", default = "rcynic-data/unauthenticated", + help = "directory tree in which to write extracted RPKI objects") + parser.add_argument("--serial-filename", + help = "file name in which to store RRDP serial number") + parser.add_argument("rrdp_file", nargs = "+", + help = "RRDP snapshot or deltas file") + self.args = parser.parse_args() + if not os.path.isdir(self.args.rcynic_tree): + os.makedirs(self.args.rcynic_tree) + for rrdp_file in self.args.rrdp_file: + xml = lxml.etree.ElementTree(file = rrdp_file).getroot() + rpki.relaxng.rrdp.assertValid(xml) + getattr(self, "handle_" + xml.tag[len(rpki.relaxng.rrdp.xmlns):])(xml) + + @property + def serial_filename(self): + return self.args.serial_filename or os.path.join(self.args.rcynic_tree, "serial") + + def get_serial(self): + with open(self.serial_filename, "r") as f: + return f.read().strip() + + def set_serial(self, value): + with open(self.serial_filename, "w") as f: + f.write("%s\n" % value) + + def handle_notification(self, xml): + print "Notification version %s session %s serial %s" % ( + xml.get("version"), xml.get("session_id"), xml.get("serial")) + assert xml[0].tag == tags.snapshot + print " Snapshot URI %s hash %s" % ( + xml[0].get("uri"), xml[0].get("hash")) + for i, elt in enumerate(xml.iterchildren(tags.delta)): + print " Delta %3d serial %6s URI %s hash %s" % ( + i, elt.get("serial"), elt.get("uri"), elt.get("hash")) + + def uri_to_filename(self, uri): + assert uri.startswith("rsync://") + return os.path.join(self.args.rcynic_tree, uri[len("rsync://"):]) + + def add_obj(self, uri, obj): + fn = self.uri_to_filename(uri) + dn = os.path.dirname(fn) + if not os.path.isdir(dn): + os.makedirs(dn) + with open(fn, "wb") as f: + f.write(obj) + + def del_obj(self, uri, hash): + fn = self.uri_to_filename(uri) + with open(fn, "rb") as f: + if hash.lower() != rpki.x509.sha256(f.read()).encode("hex"): + raise RuntimeError("Hash mismatch for URI %s" % uri) + os.unlink(fn) + dn = os.path.dirname(fn) + while True: + try: + os.rmdir(dn) + except OSError: + break + else: + dn = os.path.dirname(dn) + + def handle_snapshot(self, xml): + print "Unpacking snapshot version %s session %s serial %6s" % ( + xml.get("version"), xml.get("session_id"), xml.get("serial")) + for elt in xml.iterchildren(tags.publish): + print " ", elt.get("uri") + self.add_obj(elt.get("uri"), elt.text.decode("base64")) + self.set_serial(xml.get("serial")) + + def handle_delta(self, xml): + old_serial = int(self.get_serial()) + new_serial = int(xml.get("serial")) + print "Unpacking deltas version %s session %s serial %s" % ( + xml.get("version"), xml.get("session_id"), new_serial) + if old_serial != new_serial - 1: + raise RuntimeError("Can't apply deltas: old serial %s new serial %s" % (old_serial, new_serial)) + for i, elt in enumerate(xml.iterchildren(tags.withdraw)): + uri = elt.get("uri") + hash = elt.get("hash") + print " %3d withdraw URI %s hash %s" % (i, uri, hash) + self.del_obj(uri, hash) + for i, elt in enumerate(xml.iterchildren(tags.publish)): + uri = elt.get("uri") + hash = elt.get("hash", None) + print " %3d publish URI %s hash %s" % (i, 
uri, hash) + if hash is not None: + self.del_obj(uri, hash) + self.add_obj(elt.get("uri"), elt.text.decode("base64")) + self.set_serial(new_serial) + +if __name__ == "__main__": + main() diff --git a/potpourri/show-key-identifiers.py b/potpourri/show-key-identifiers.py index fa2bae8b..4ba6219a 100644 --- a/potpourri/show-key-identifiers.py +++ b/potpourri/show-key-identifiers.py @@ -29,26 +29,26 @@ import rpki.oids def check_dir(s): - if os.path.isdir(s): - return os.path.abspath(s) - else: - raise argparse.ArgumentTypeError("%r is not a directory" % s) + if os.path.isdir(s): + return os.path.abspath(s) + else: + raise argparse.ArgumentTypeError("%r is not a directory" % s) def filename_to_uri(filename): - if not filename.startswith(args.rcynic_dir): - raise ValueError - return "rsync://" + filename[len(args.rcynic_dir):].lstrip("/") + if not filename.startswith(args.rcynic_dir): + raise ValueError + return "rsync://" + filename[len(args.rcynic_dir):].lstrip("/") def get_roa(fn): - return rpki.POW.CMS.derReadFile(fn).certs()[0] + return rpki.POW.CMS.derReadFile(fn).certs()[0] def get_crl(fn): - return rpki.POW.CRL.derReadFile(fn) + return rpki.POW.CRL.derReadFile(fn) def get_cer(fn): - return rpki.POW.X509.derReadFile(fn) + return rpki.POW.X509.derReadFile(fn) dispatch = dict(roa = get_roa, crl = get_crl, @@ -59,23 +59,23 @@ parser.add_argument("rcynic_dir", type = check_dir, help = "rcynic authenticated args = parser.parse_args() for root, dirs, files in os.walk(args.rcynic_dir): - for fn in files: - fn = os.path.join(root, fn) - fn2 = os.path.splitext(fn)[1][1:] - if fn2 not in dispatch: - continue - obj = dispatch[fn2](fn) - uri = filename_to_uri(fn) - try: - ski = obj.getSKI().encode("hex") - except: - ski = "" - try: - aki = obj.getAKI().encode("hex") - except: - aki = "" - try: - res = ",".join(",".join("%s-%s" % r2 for r2 in r1) for r1 in obj.getRFC3779() if r1 is not None) - except: - res = "" - print "\t".join((uri, ski, aki, res)) + for fn in files: + fn = os.path.join(root, fn) + fn2 = os.path.splitext(fn)[1][1:] + if fn2 not in dispatch: + continue + obj = dispatch[fn2](fn) + uri = filename_to_uri(fn) + try: + ski = obj.getSKI().encode("hex") + except: + ski = "" + try: + aki = obj.getAKI().encode("hex") + except: + aki = "" + try: + res = ",".join(",".join("%s-%s" % r2 for r2 in r1) for r1 in obj.getRFC3779() if r1 is not None) + except: + res = "" + print "\t".join((uri, ski, aki, res)) diff --git a/potpourri/show-tracking-data.py b/potpourri/show-tracking-data.py index 07e0a144..0fbb26c1 100644 --- a/potpourri/show-tracking-data.py +++ b/potpourri/show-tracking-data.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -31,9 +31,9 @@ import rpki.sundial rcynic_dir = sys.argv[1] for root, dirs, files in os.walk(rcynic_dir): - for f in files: - path = os.path.join(root, f) - date = rpki.sundial.datetime.utcfromtimestamp(os.stat(path).st_mtime) - uri = "rsync://" + path[len(rcynic_dir):].lstrip("/") - obj = rpki.x509.uri_dispatch(uri)(DER_file = path) - print date, obj.tracking_data(uri) + for f in files: + path = os.path.join(root, f) + date = rpki.sundial.datetime.utcfromtimestamp(os.stat(path).st_mtime) + uri = "rsync://" + path[len(rcynic_dir):].lstrip("/") + obj = rpki.x509.uri_dispatch(uri)(DER_file = path) + print date, obj.tracking_data(uri) diff --git a/potpourri/signed-object-dates.py b/potpourri/signed-object-dates.py index fefd9448..d5699252 100644 --- a/potpourri/signed-object-dates.py +++ b/potpourri/signed-object-dates.py @@ -5,11 +5,11 @@ # if the object is a manifest, also extract thisUpdate and nextUpdate. # Copyright (C) 2013 Dragon Research Labs ("DRL") -# +# # Permission to use, copy, modify, and/or distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -27,14 +27,14 @@ extract_flags = (rpki.POW.CMS_NOCRL | rpki.POW.CMS_NO_CONTENT_VERIFY) for fn in sys.argv[1:]: - cls = rpki.POW.Manifest if fn.endswith(".mft") else rpki.POW.CMS - cms = cls.derReadFile(fn) - cer = cms.certs()[0] - print fn - print " notBefore: ", cer.getNotBefore() - if fn.endswith(".mft"): - cms.verify(rpki.POW.X509Store(), None, extract_flags) - print " thisUpdate:", cms.getThisUpdate() - print " nextUpdate:", cms.getNextUpdate() - print " notAfter: ", cer.getNotAfter() - print + cls = rpki.POW.Manifest if fn.endswith(".mft") else rpki.POW.CMS + cms = cls.derReadFile(fn) + cer = cms.certs()[0] + print fn + print " notBefore: ", cer.getNotBefore() + if fn.endswith(".mft"): + cms.verify(rpki.POW.X509Store(), None, extract_flags) + print " thisUpdate:", cms.getThisUpdate() + print " nextUpdate:", cms.getNextUpdate() + print " notAfter: ", cer.getNotAfter() + print diff --git a/potpourri/testbed-rootcert.py b/potpourri/testbed-rootcert.py index 0716be2f..5e2e97c5 100644 --- a/potpourri/testbed-rootcert.py +++ b/potpourri/testbed-rootcert.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -26,7 +26,7 @@ import sys from rpki.csv_utils import csv_reader if len(sys.argv) not in (2, 4): - sys.exit("Usage: %s holder [asns.csv prefixes.csv]" % sys.argv[0]) + sys.exit("Usage: %s holder [asns.csv prefixes.csv]" % sys.argv[0]) print '''\ [req] @@ -53,7 +53,7 @@ sbgp-ipAddrBlock = critical,@rfc3997_addrs "HOLDER" : sys.argv[1].upper() } for i, asn in enumerate(asn for handle, asn in csv_reader(sys.argv[2] if len(sys.argv) > 2 else "asns.csv", columns = 2)): - print "AS.%d = %s" % (i, asn) + print "AS.%d = %s" % (i, asn) print '''\ @@ -62,5 +62,5 @@ print '''\ ''' for i, prefix in enumerate(prefix for handle, prefix in csv_reader(sys.argv[3] if len(sys.argv) > 2 else "prefixes.csv", columns = 2)): - v = 6 if ":" in prefix else 4 - print "IPv%d.%d = %s" % (v, i, prefix) + v = 6 if ":" in prefix else 4 + print "IPv%d.%d = %s" % (v, i, prefix) diff --git a/potpourri/translate-handles.py b/potpourri/translate-handles.py index 49848277..124604e6 100644 --- a/potpourri/translate-handles.py +++ b/potpourri/translate-handles.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -39,11 +39,11 @@ translations = dict((src, dst) for src, dst in csv_reader("translations.csv", co for filename in sys.argv[1:]: - f = csv_writer(filename) + f = csv_writer(filename) - for cols in csv_reader(filename): - if cols[0] in translations: - cols[0] = translations[cols[0]] - f.writerow(cols) + for cols in csv_reader(filename): + if cols[0] in translations: + cols[0] = translations[cols[0]] + f.writerow(cols) - f.close() + f.close() diff --git a/potpourri/upgrade-add-ghostbusters.py b/potpourri/upgrade-add-ghostbusters.py index a8c8a92b..2370e959 100644 --- a/potpourri/upgrade-add-ghostbusters.py +++ b/potpourri/upgrade-add-ghostbusters.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2009--2011 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -24,26 +24,26 @@ import getopt, sys, rpki.config, warnings from rpki.mysql_import import MySQLdb def fix(name, *statements): - db = MySQLdb.connect(db = cfg.get("sql-database", section = name), - user = cfg.get("sql-username", section = name), - passwd = cfg.get("sql-password", section = name)) - cur = db.cursor() - for statement in statements: - cur.execute(statement) - db.commit() - db.close() + db = MySQLdb.connect(db = cfg.get("sql-database", section = name), + user = cfg.get("sql-username", section = name), + passwd = cfg.get("sql-password", section = name)) + cur = db.cursor() + for statement in statements: + cur.execute(statement) + db.commit() + db.close() cfg_file = None opts, argv = getopt.getopt(sys.argv[1:], "c:h?", ["config=", "help"]) for o, a in opts: - if o in ("-h", "--help", "-?"): - print __doc__ - sys.exit(0) - if o in ("-c", "--config"): - cfg_file = a + if o in ("-h", "--help", "-?"): + print __doc__ + sys.exit(0) + if o in ("-c", "--config"): + cfg_file = a -cfg = rpki.config.parser(cfg_file, "myrpki") +cfg = rpki.config.parser(filename = cfg_file, section = "myrpki") fix("irdbd", """ CREATE TABLE ghostbuster_request ( diff --git a/potpourri/validation-status-sql.py b/potpourri/validation-status-sql.py index fc52e64b..646d7d9b 100755 --- a/potpourri/validation-status-sql.py +++ b/potpourri/validation-status-sql.py @@ -34,186 +34,185 @@ import subprocess class Parser(object): - @staticmethod - def main(): - parser = argparse.ArgumentParser( - description = __doc__, - formatter_class = argparse.ArgumentDefaultsHelpFormatter) - group = parser.add_mutually_exclusive_group(required = True) - group.add_argument("--mailbox", "--mb", - help = "Maildir mailbox containing rcynic XML output") - group.add_argument("--tarballs", - help = "directory tree of tar files containing containing rcynic XML output") - parser.add_argument("--database", "--db", - default = "validation-status-sql.db", - help = "SQLite3 database") - parser.add_argument("--path-within-tarball", - default = "var/rcynic/data/rcynic.xml", - help = "rcynic.xml path name within tarball(s)") - parser.add_argument("--tar-extensions", nargs = "+", - default = ".tar .tar.gz .tgz .tar.bz2 .tbz .tar.xz .txz".split(), - help = "extensions to recognize as indicating tar files") - args = parser.parse_args() - if args.mailbox: - ParserMailbox(args) - else: - ParserTarball(args) - - def __init__(self, args): - self.args = args - self.init_sql() - self.init_hook() - self.index1() - self.parsed = 1 - for self.current, self.iterval in enumerate(self.iterator, 1): - self.parse_xml() - if self.parsed > 1: - sys.stderr.write("\n") - self.index2() - self.db.close() - - - def init_sql(self): - creating = not os.path.exists(self.args.database) - self.db = sqlite3.connect(self.args.database) - self.db.text_factory = str - self.db.executescript(''' - PRAGMA foreign_keys = off; - PRAGMA synchronous = off; - PRAGMA count_changes = off; - ''') - - if creating: - self.db.executescript(''' - CREATE TABLE sessions ( - session_id INTEGER PRIMARY KEY NOT NULL, - session DATETIME NOT NULL, - handle TEXT NOT NULL - ); - - CREATE TABLE uris ( - uri_id INTEGER PRIMARY KEY NOT NULL, - uri TEXT NOT NULL - ); - - CREATE TABLE codes ( - code_id INTEGER PRIMARY KEY NOT NULL, - code TEXT NOT NULL - ); - - CREATE TABLE generations ( - generation_id INTEGER PRIMARY KEY NOT NULL, - generation TEXT NOT NULL - ); - - CREATE TABLE events ( - id INTEGER PRIMARY KEY NOT NULL, - timestamp DATETIME NOT 
NULL, - session_id INTEGER NOT NULL REFERENCES sessions (session_id) ON DELETE RESTRICT ON UPDATE RESTRICT, - generation_id INTEGER NOT NULL REFERENCES generations (generation_id) ON DELETE RESTRICT ON UPDATE RESTRICT, - code_id INTEGER NOT NULL REFERENCES codes (code_id) ON DELETE RESTRICT ON UPDATE RESTRICT, - uri_id INTEGER NOT NULL REFERENCES uris (uri_id) ON DELETE RESTRICT ON UPDATE RESTRICT - ); - - CREATE VIEW status AS - SELECT id, handle, session, timestamp, generation, code, uri - FROM events - NATURAL JOIN sessions - NATURAL JOIN uris - NATURAL JOIN codes - NATURAL JOIN generations; - ''') - - - def index1(self): - self.db.executescript(''' - CREATE UNIQUE INDEX IF NOT EXISTS sessions_index ON sessions (session); - CREATE UNIQUE INDEX IF NOT EXISTS handles_index ON sessions (handle); - CREATE UNIQUE INDEX IF NOT EXISTS uris_index ON uris (uri); - CREATE UNIQUE INDEX IF NOT EXISTS codes_index ON codes (code); - CREATE UNIQUE INDEX IF NOT EXISTS generations_index ON generations (generation); - ''') - - - def index2(self): - self.db.executescript(''' - CREATE UNIQUE INDEX IF NOT EXISTS events_index ON events (uri_id, timestamp, code_id, generation_id); - ''') - - - def string_id(self, table, value): - field = table.rstrip("s") - try: - return self.db.execute("SELECT %s_id FROM %s WHERE %s = ?" % (field, table, field), (value,)).fetchone()[0] - except: - return self.db.execute("INSERT INTO %s (%s) VALUES (?)" % (table, field), (value,)).lastrowid - - - def parse_xml(self): - sys.stderr.write("\r%s %d/%d/%d...%s " % ("|\\-/"[self.current & 3], - self.current, self.parsed, self.total, self.handle)) - if self.db.execute("SELECT handle FROM sessions WHERE handle = ?", (self.handle,)).fetchone(): - return - xml = self.read_xml() - with self.db: - session_id = self.db.execute("INSERT INTO sessions (session, handle) VALUES (strftime('%s', ?), ?)", - (xml.get("date"), self.handle)).lastrowid - self.db.executemany("INSERT INTO events (session_id, timestamp, generation_id, code_id, uri_id) " - "VALUES (?, strftime('%s', ?), ?, ?, ?)", - ((session_id, - x.get("timestamp"), - self.string_id("generations", x.get("generation", "none")), - self.string_id("codes", x.get("status")), - self.string_id("uris", x.text.strip())) - for x in xml.findall("validation_status"))) - self.parsed += 1 + @staticmethod + def main(): + parser = argparse.ArgumentParser( + description = __doc__, + formatter_class = argparse.ArgumentDefaultsHelpFormatter) + group = parser.add_mutually_exclusive_group(required = True) + group.add_argument("--mailbox", "--mb", + help = "Maildir mailbox containing rcynic XML output") + group.add_argument("--tarballs", + help = "directory tree of tar files containing containing rcynic XML output") + parser.add_argument("--database", "--db", + default = "validation-status-sql.db", + help = "SQLite3 database") + parser.add_argument("--path-within-tarball", + default = "var/rcynic/data/rcynic.xml", + help = "rcynic.xml path name within tarball(s)") + parser.add_argument("--tar-extensions", nargs = "+", + default = ".tar .tar.gz .tgz .tar.bz2 .tbz .tar.xz .txz".split(), + help = "extensions to recognize as indicating tar files") + args = parser.parse_args() + if args.mailbox: + ParserMailbox(args) + else: + ParserTarball(args) + + def __init__(self, args): + self.args = args + self.init_sql() + self.init_hook() + self.index1() + self.parsed = 1 + for self.current, self.iterval in enumerate(self.iterator, 1): + self.parse_xml() + if self.parsed > 1: + sys.stderr.write("\n") + self.index2() + 
self.db.close() + + + def init_sql(self): + creating = not os.path.exists(self.args.database) + self.db = sqlite3.connect(self.args.database) + self.db.text_factory = str + self.db.executescript(''' + PRAGMA foreign_keys = off; + PRAGMA synchronous = off; + PRAGMA count_changes = off; + ''') + + if creating: + self.db.executescript(''' + CREATE TABLE sessions ( + session_id INTEGER PRIMARY KEY NOT NULL, + session DATETIME NOT NULL, + handle TEXT NOT NULL + ); + + CREATE TABLE uris ( + uri_id INTEGER PRIMARY KEY NOT NULL, + uri TEXT NOT NULL + ); + + CREATE TABLE codes ( + code_id INTEGER PRIMARY KEY NOT NULL, + code TEXT NOT NULL + ); + + CREATE TABLE generations ( + generation_id INTEGER PRIMARY KEY NOT NULL, + generation TEXT NOT NULL + ); + + CREATE TABLE events ( + id INTEGER PRIMARY KEY NOT NULL, + timestamp DATETIME NOT NULL, + session_id INTEGER NOT NULL REFERENCES sessions (session_id) ON DELETE RESTRICT ON UPDATE RESTRICT, + generation_id INTEGER NOT NULL REFERENCES generations (generation_id) ON DELETE RESTRICT ON UPDATE RESTRICT, + code_id INTEGER NOT NULL REFERENCES codes (code_id) ON DELETE RESTRICT ON UPDATE RESTRICT, + uri_id INTEGER NOT NULL REFERENCES uris (uri_id) ON DELETE RESTRICT ON UPDATE RESTRICT + ); + + CREATE VIEW status AS + SELECT id, handle, session, timestamp, generation, code, uri + FROM events + NATURAL JOIN sessions + NATURAL JOIN uris + NATURAL JOIN codes + NATURAL JOIN generations; + ''') + + + def index1(self): + self.db.executescript(''' + CREATE UNIQUE INDEX IF NOT EXISTS sessions_index ON sessions (session); + CREATE UNIQUE INDEX IF NOT EXISTS handles_index ON sessions (handle); + CREATE UNIQUE INDEX IF NOT EXISTS uris_index ON uris (uri); + CREATE UNIQUE INDEX IF NOT EXISTS codes_index ON codes (code); + CREATE UNIQUE INDEX IF NOT EXISTS generations_index ON generations (generation); + ''') + + + def index2(self): + self.db.executescript(''' + CREATE UNIQUE INDEX IF NOT EXISTS events_index ON events (uri_id, timestamp, code_id, generation_id); + ''') + + + def string_id(self, table, value): + field = table.rstrip("s") + try: + return self.db.execute("SELECT %s_id FROM %s WHERE %s = ?" 
% (field, table, field), (value,)).fetchone()[0] + except: + return self.db.execute("INSERT INTO %s (%s) VALUES (?)" % (table, field), (value,)).lastrowid + + + def parse_xml(self): + sys.stderr.write("\r%s %d/%d/%d...%s " % ("|\\-/"[self.current & 3], + self.current, self.parsed, self.total, self.handle)) + if self.db.execute("SELECT handle FROM sessions WHERE handle = ?", (self.handle,)).fetchone(): + return + xml = self.read_xml() + with self.db: + session_id = self.db.execute("INSERT INTO sessions (session, handle) VALUES (strftime('%s', ?), ?)", + (xml.get("date"), self.handle)).lastrowid + self.db.executemany("INSERT INTO events (session_id, timestamp, generation_id, code_id, uri_id) " + "VALUES (?, strftime('%s', ?), ?, ?, ?)", + ((session_id, + x.get("timestamp"), + self.string_id("generations", x.get("generation", "none")), + self.string_id("codes", x.get("status")), + self.string_id("uris", x.text.strip())) + for x in xml.findall("validation_status"))) + self.parsed += 1 class ParserTarball(Parser): - def init_hook(self): - self.total = 0 - for fn in self.iter_tarball_names(): - self.total += 1 - self.iterator = self.iter_tarball_names() + def init_hook(self): + self.total = 0 + for fn in self.iter_tarball_names(): + self.total += 1 + self.iterator = self.iter_tarball_names() - @property - def handle(self): - return self.iterval + @property + def handle(self): + return self.iterval - def read_xml(self): - return lxml.etree.ElementTree( - file = subprocess.Popen(("tar", "Oxf", self.iterval, self.args.path_within_tarball), - stdout = subprocess.PIPE).stdout).getroot() + def read_xml(self): + return lxml.etree.ElementTree( + file = subprocess.Popen(("tar", "Oxf", self.iterval, self.args.path_within_tarball), + stdout = subprocess.PIPE).stdout).getroot() - def iter_tarball_names(self): - if os.path.isdir(self.args.tarballs): - for root, dirs, files in os.walk(self.args.tarballs): - for fn in files: - if any(fn.endswith(ext) for ext in self.args.tar_extensions): - yield os.path.join(root, fn) - else: - yield self.args.tarballs + def iter_tarball_names(self): + if os.path.isdir(self.args.tarballs): + for root, dirs, files in os.walk(self.args.tarballs): + for fn in files: + if any(fn.endswith(ext) for ext in self.args.tar_extensions): + yield os.path.join(root, fn) + else: + yield self.args.tarballs class ParserMailbox(Parser): - def init_hook(self): - self.mb = mailbox.Maildir(self.args.mailbox, factory = None, create = False) - self.total = len(self.mb) - self.iterator = self.mb.iterkeys() + def init_hook(self): + self.mb = mailbox.Maildir(self.args.mailbox, factory = None, create = False) + self.total = len(self.mb) + self.iterator = self.mb.iterkeys() - @property - def handle(self): - return self.mb[self.iterval].get("Message-ID") + @property + def handle(self): + return self.mb[self.iterval].get("Message-ID") - def read_xml(self): - return lxml.etree.XML(self.mb[self.iterval].get_payload()) + def read_xml(self): + return lxml.etree.XML(self.mb[self.iterval].get_payload()) if __name__ == "__main__": - try: - Parser.main() - except KeyboardInterrupt: - pass - + try: + Parser.main() + except KeyboardInterrupt: + pass diff --git a/potpourri/whack-ripe-asns.py b/potpourri/whack-ripe-asns.py index 9c702271..ed4a6451 100644 --- a/potpourri/whack-ripe-asns.py +++ b/potpourri/whack-ripe-asns.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2010 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is 
hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -44,14 +44,14 @@ sorter = subprocess.Popen(("sort", "-T.", "-n"), stdout = subprocess.PIPE) for line in sys.stdin: - handle, asn = line.split() + handle, asn = line.split() - if "-" in asn: - range_min, range_max = asn.split("-") - else: - range_min, range_max = asn, asn + if "-" in asn: + range_min, range_max = asn.split("-") + else: + range_min, range_max = asn, asn - sorter.stdin.write("%d %d\n" % (long(range_min), long(range_max))) + sorter.stdin.write("%d %d\n" % (long(range_min), long(range_max))) sorter.stdin.close() @@ -59,22 +59,22 @@ prev_min = None prev_max = None def show(): - if prev_min and prev_max: - sys.stdout.write("x\t%s-%s\n" % (prev_min, prev_max)) + if prev_min and prev_max: + sys.stdout.write("x\t%s-%s\n" % (prev_min, prev_max)) for line in sorter.stdout: - this_min, this_max = line.split() - this_min = long(this_min) - this_max = long(this_max) - - if prev_min and prev_max and prev_max + 1 >= this_min: - prev_min = min(prev_min, this_min) - prev_max = max(prev_max, this_max) - - else: - show() - prev_min = this_min - prev_max = this_max + this_min, this_max = line.split() + this_min = long(this_min) + this_max = long(this_max) + + if prev_min and prev_max and prev_max + 1 >= this_min: + prev_min = min(prev_min, this_min) + prev_max = max(prev_max, this_max) + + else: + show() + prev_min = this_min + prev_max = this_max show() diff --git a/potpourri/whack-ripe-prefixes.py b/potpourri/whack-ripe-prefixes.py index 52ea3f18..b3d9c39d 100644 --- a/potpourri/whack-ripe-prefixes.py +++ b/potpourri/whack-ripe-prefixes.py @@ -1,11 +1,11 @@ # $Id$ -# +# # Copyright (C) 2010 Internet Systems Consortium ("ISC") -# +# # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. -# +# # THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, @@ -49,21 +49,21 @@ sorter = subprocess.Popen(("sort", "-T.", "-n"), stdout = subprocess.PIPE) for line in sys.stdin: - handle, prefix = line.split() + handle, prefix = line.split() - if "-" in prefix: - range_min, range_max = prefix.split("-") - range_min = rpki.ipaddrs.parse(range_min) - range_max = rpki.ipaddrs.parse(range_max) + if "-" in prefix: + range_min, range_max = prefix.split("-") + range_min = rpki.ipaddrs.parse(range_min) + range_max = rpki.ipaddrs.parse(range_max) - else: - address, length = prefix.split("/") - address = rpki.ipaddrs.parse(address) - mask = (1L << (address.bits - int(length))) - 1 - range_min = address & ~mask - range_max = address | mask + else: + address, length = prefix.split("/") + address = rpki.ipaddrs.parse(address) + mask = (1L << (address.bits - int(length))) - 1 + range_min = address & ~mask + range_max = address | mask - sorter.stdin.write("%d %d\n" % (long(range_min), long(range_max))) + sorter.stdin.write("%d %d\n" % (long(range_min), long(range_max))) sorter.stdin.close() @@ -71,28 +71,28 @@ prev_min = None prev_max = None def address(number): - if number > 0xffffffff: - return rpki.ipaddrs.v6addr(number) - else: - return rpki.ipaddrs.v4addr(number) + if number > 0xffffffff: + return rpki.ipaddrs.v6addr(number) + else: + return rpki.ipaddrs.v4addr(number) def show(): - if prev_min and prev_max: - sys.stdout.write("x\t%s-%s\n" % (address(prev_min), address(prev_max))) + if prev_min and prev_max: + sys.stdout.write("x\t%s-%s\n" % (address(prev_min), address(prev_max))) for line in sorter.stdout: - this_min, this_max = line.split() - this_min = long(this_min) - this_max = long(this_max) - - if prev_min and prev_max and prev_max + 1 >= this_min: - prev_min = min(prev_min, this_min) - prev_max = max(prev_max, this_max) - - else: - show() - prev_min = this_min - prev_max = this_max + this_min, this_max = line.split() + this_min = long(this_min) + this_max = long(this_max) + + if prev_min and prev_max and prev_max + 1 >= this_min: + prev_min = min(prev_min, this_min) + prev_max = max(prev_max, this_max) + + else: + show() + prev_min = this_min + prev_max = this_max show() diff --git a/potpourri/x509-dot.py b/potpourri/x509-dot.py index 42e1543a..493199fd 100644 --- a/potpourri/x509-dot.py +++ b/potpourri/x509-dot.py @@ -36,123 +36,123 @@ import rpki.POW, sys, glob, os class x509(object): - ski = None - aki = None + ski = None + aki = None - show_file = False - show_ski = False - show_aki = False - show_issuer = True - show_subject = True + show_file = False + show_ski = False + show_aki = False + show_issuer = True + show_subject = True - cn_only = True + cn_only = True - subjects = {} + subjects = {} - def __init__(self, filename): + def __init__(self, filename): - while filename.startswith("./"): - filename = filename[2:] + while filename.startswith("./"): + filename = filename[2:] - self.filename = filename + self.filename = filename - f = open(filename, "rb") - text = f.read() - f.close() + f = open(filename, "rb") + text = f.read() + f.close() - if "-----BEGIN" in text: - self.pow = rpki.POW.X509.pemRead(text) - else: - self.pow = rpki.POW.X509.derRead(text) + if "-----BEGIN" in text: + self.pow = rpki.POW.X509.pemRead(text) + else: + self.pow = rpki.POW.X509.derRead(text) - try: - self.ski = ":".join(["%02X" % ord(i) for i in self.pow.getSKI()]) - except: - pass + try: + self.ski = ":".join(["%02X" % ord(i) for i in self.pow.getSKI()]) + except: + pass - try: - self.aki = ":".join(["%02X" 
% ord(i) for i in self.pow.getAKI()]) - except: - pass + try: + self.aki = ":".join(["%02X" % ord(i) for i in self.pow.getAKI()]) + except: + pass - self.subject = self.canonize(self.pow.getSubject()) - self.issuer = self.canonize(self.pow.getIssuer()) + self.subject = self.canonize(self.pow.getSubject()) + self.issuer = self.canonize(self.pow.getIssuer()) - if self.subject in self.subjects: - self.subjects[self.subject].append(self) - else: - self.subjects[self.subject] = [self] + if self.subject in self.subjects: + self.subjects[self.subject].append(self) + else: + self.subjects[self.subject] = [self] - def canonize(self, name): + def canonize(self, name): - # Probably should just use rpki.x509.X501DN class here. + # Probably should just use rpki.x509.X501DN class here. - try: - if self.cn_only and name[0][0][0] == "2.5.4.3": - return name[0][0][1] - except: - pass + try: + if self.cn_only and name[0][0][0] == "2.5.4.3": + return name[0][0][1] + except: + pass - return name + return name - def set_node(self, node): + def set_node(self, node): - self.node = node + self.node = node - def dot(self): + def dot(self): - label = [] + label = [] - if self.show_issuer: - label.append(("Issuer", self.issuer)) + if self.show_issuer: + label.append(("Issuer", self.issuer)) - if self.show_subject: - label.append(("Subject", self.subject)) + if self.show_subject: + label.append(("Subject", self.subject)) - if self.show_file: - label.append(("File", self.filename)) + if self.show_file: + label.append(("File", self.filename)) - if self.show_aki: - label.append(("AKI", self.aki)) + if self.show_aki: + label.append(("AKI", self.aki)) - if self.show_ski: - label.append(("SKI", self.ski)) + if self.show_ski: + label.append(("SKI", self.ski)) - print "#", repr(label) + print "#", repr(label) - if len(label) > 1: - print '%s [shape = record, label = "{%s}"];' % (self.node, "|".join("{%s|%s}" % (x, y) for x, y in label if y is not None)) - else: - print '%s [label = "%s"];' % (self.node, label[0][1]) + if len(label) > 1: + print '%s [shape = record, label = "{%s}"];' % (self.node, "|".join("{%s|%s}" % (x, y) for x, y in label if y is not None)) + else: + print '%s [label = "%s"];' % (self.node, label[0][1]) - for issuer in self.subjects.get(self.issuer, ()): + for issuer in self.subjects.get(self.issuer, ()): - if issuer is self: - print "# Issuer is self" - issuer = None + if issuer is self: + print "# Issuer is self" + issuer = None - if issuer is not None and self.aki is not None and self.ski is not None and self.aki == self.ski: - print "# Self-signed" - issuer = None + if issuer is not None and self.aki is not None and self.ski is not None and self.aki == self.ski: + print "# Self-signed" + issuer = None - if issuer is not None and self.aki is not None and issuer.ski is not None and self.aki != issuer.ski: - print "# AKI does not match issuer SKI" - issuer = None + if issuer is not None and self.aki is not None and issuer.ski is not None and self.aki != issuer.ski: + print "# AKI does not match issuer SKI" + issuer = None - if issuer is not None: - print "%s -> %s;" % (issuer.node, self.node) + if issuer is not None: + print "%s -> %s;" % (issuer.node, self.node) - print + print certs = [] for topdir in sys.argv[1:] or ["."]: - for dirpath, dirnames, filenames in os.walk(topdir): - certs += [x509(dirpath + "/" + filename) for filename in filenames if filename.endswith(".cer")] + for dirpath, dirnames, filenames in os.walk(topdir): + certs += [x509(dirpath + "/" + filename) for filename in filenames if 
filename.endswith(".cer")]
for i, cert in enumerate(certs):
- cert.set_node("cert_%d" % i)
+ cert.set_node("cert_%d" % i)
print """\
digraph certificates {
@@ -165,6 +165,6 @@ ratio = fill;
"""
for cert in certs:
- cert.dot()
+ cert.dot()
print "}"
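Two asides on the logic these reindented scripts implement. The sketches below are illustrations written for this note, not code from the tree: the Cert tuple, the prefix_to_range helper, and all sample values are hypothetical.

The certificate-graph rule in x509-dot.py draws a DOT edge from a candidate issuer to a certificate unless the candidate is the certificate itself, the certificate looks self-signed (its AKI equals its own SKI), or its AKI does not match the candidate's SKI. A minimal stand-alone sketch of that rule, assuming plain tuples in place of the rpki.POW-backed wrapper, runs under Python 2 or 3:

# Sketch of the AKI/SKI edge rule used by x509-dot.py.
from collections import namedtuple

# Hypothetical stand-in for the script's rpki.POW-backed x509 wrapper;
# only the fields the rule needs are kept.
Cert = namedtuple("Cert", "node subject issuer ski aki")

def dot_edges(certs):
    # Index certificates by subject, as the script's subjects dict does.
    by_subject = {}
    for cert in certs:
        by_subject.setdefault(cert.subject, []).append(cert)
    for cert in certs:
        for issuer in by_subject.get(cert.issuer, ()):
            if issuer is cert:
                continue                     # issuer is self
            if cert.aki is not None and cert.aki == cert.ski:
                continue                     # self-signed
            if cert.aki is not None and issuer.ski is not None and cert.aki != issuer.ski:
                continue                     # AKI does not match issuer SKI
            yield "%s -> %s;" % (issuer.node, cert.node)

if __name__ == "__main__":
    root = Cert("cert_0", "Example Root", "Example Root", "AA:BB", "AA:BB")
    leaf = Cert("cert_1", "Example Leaf", "Example Root", "CC:DD", "AA:BB")
    for edge in dot_edges([root, leaf]):
        print(edge)                          # prints: cert_0 -> cert_1;

Similarly, the CIDR-to-range arithmetic in whack-ripe-prefixes.py (mask = (1 << (bits - length)) - 1, then address & ~mask and address | mask) can be checked with plain integers instead of rpki.ipaddrs objects, assuming the address is already an integer of the right width:

# Sketch of the prefix -> (range_min, range_max) arithmetic.
def prefix_to_range(address, length, bits):
    """address: network address as an integer; bits: 32 for IPv4, 128 for IPv6."""
    mask = (1 << (bits - length)) - 1
    return address & ~mask, address | mask

# 10.0.0.0/8 covers 10.0.0.0 through 10.255.255.255.
lo, hi = prefix_to_range(10 << 24, 8, 32)
assert lo == 0x0A000000 and hi == 0x0AFFFFFF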