aboutsummaryrefslogtreecommitdiff
path: root/rp
diff options
context:
space:
mode:
Diffstat (limited to 'rp')
-rwxr-xr-xrp/rcynic/rcynic-cron90
-rwxr-xr-xrp/rcynic/rcynic-html932
-rwxr-xr-xrp/rcynic/rcynic-svn106
-rwxr-xr-xrp/rcynic/rcynic-text144
-rw-r--r--rp/rcynic/rpki-torrent.py1166
-rwxr-xr-xrp/rcynic/validation_status16
-rwxr-xr-xrp/rpki-rtr/rpki-rtr4
-rwxr-xr-xrp/utils/find_roa202
-rwxr-xr-xrp/utils/hashdir50
-rwxr-xr-xrp/utils/print_roa70
-rwxr-xr-xrp/utils/print_rpki_manifest34
-rwxr-xr-xrp/utils/scan_roas44
-rwxr-xr-xrp/utils/scan_routercerts28
-rwxr-xr-xrp/utils/uri70
14 files changed, 1478 insertions, 1478 deletions
diff --git a/rp/rcynic/rcynic-cron b/rp/rcynic/rcynic-cron
index 53bfea9f..3d38726c 100755
--- a/rp/rcynic/rcynic-cron
+++ b/rp/rcynic/rcynic-cron
@@ -35,34 +35,34 @@ import argparse
import rpki.autoconf
def run(*cmd, **kwargs):
- chroot_this = kwargs.pop("chroot_this", False)
- cwd = kwargs.pop("cwd", None)
- pid = os.fork()
- if pid == 0:
- if chroot_this:
- os.chdir(rpki.autoconf.RCYNIC_DIR)
- elif cwd is not None:
- os.chdir(cwd)
- if we_are_root:
- os.initgroups(pw.pw_name, pw.pw_gid)
- if chroot_this:
- os.chroot(rpki.autoconf.RCYNIC_DIR)
- if we_are_root:
- os.setgid(pw.pw_gid)
- os.setuid(pw.pw_uid)
- os.closerange(3, os.sysconf("SC_OPEN_MAX"))
- os.execvp(cmd[0], cmd)
- os._exit(1)
- else:
- status = os.waitpid(pid, 0)[1]
- if status == 0:
- return
- elif os.WIFSIGNALED(status):
- sys.exit("Process %s exited with signal %s" % (" ".join(cmd), os.WTERMSIG(status)))
- elif os.WIFEXITED(status):
- sys.exit("Program %s exited with status %s" % (" ".join(cmd), os.WEXITSTATUS(status)))
+ chroot_this = kwargs.pop("chroot_this", False)
+ cwd = kwargs.pop("cwd", None)
+ pid = os.fork()
+ if pid == 0:
+ if chroot_this:
+ os.chdir(rpki.autoconf.RCYNIC_DIR)
+ elif cwd is not None:
+ os.chdir(cwd)
+ if we_are_root:
+ os.initgroups(pw.pw_name, pw.pw_gid)
+ if chroot_this:
+ os.chroot(rpki.autoconf.RCYNIC_DIR)
+ if we_are_root:
+ os.setgid(pw.pw_gid)
+ os.setuid(pw.pw_uid)
+ os.closerange(3, os.sysconf("SC_OPEN_MAX"))
+ os.execvp(cmd[0], cmd)
+ os._exit(1)
else:
- sys.exit("Program %s exited for unknown reason %s" % (" ".join(cmd), status))
+ status = os.waitpid(pid, 0)[1]
+ if status == 0:
+ return
+ elif os.WIFSIGNALED(status):
+ sys.exit("Process %s exited with signal %s" % (" ".join(cmd), os.WTERMSIG(status)))
+ elif os.WIFEXITED(status):
+ sys.exit("Program %s exited with status %s" % (" ".join(cmd), os.WEXITSTATUS(status)))
+ else:
+ sys.exit("Program %s exited for unknown reason %s" % (" ".join(cmd), status))
parser = argparse.ArgumentParser(description = __doc__)
parser.add_argument("--chroot", action = "store_true", help = "run chrooted")
@@ -71,29 +71,29 @@ args = parser.parse_args()
we_are_root = os.getuid() == 0
if args.chroot and not we_are_root:
- sys.exit("Only root can --chroot")
+ sys.exit("Only root can --chroot")
try:
- pw = pwd.getpwnam(rpki.autoconf.RCYNIC_USER)
+ pw = pwd.getpwnam(rpki.autoconf.RCYNIC_USER)
except KeyError:
- sys.exit("Could not find passwd entry for user %s" % rpki.autoconf.RCYNIC_USER)
+ sys.exit("Could not find passwd entry for user %s" % rpki.autoconf.RCYNIC_USER)
try:
- lock = os.open(os.path.join(rpki.autoconf.RCYNIC_DIR, "data/lock"),
- os.O_RDONLY | os.O_CREAT | os.O_NONBLOCK, 0666)
- fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
- if we_are_root:
- os.fchown(lock, pw.pw_uid, pw.pw_gid)
+ lock = os.open(os.path.join(rpki.autoconf.RCYNIC_DIR, "data/lock"),
+ os.O_RDONLY | os.O_CREAT | os.O_NONBLOCK, 0666)
+ fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ if we_are_root:
+ os.fchown(lock, pw.pw_uid, pw.pw_gid)
except (IOError, OSError), e:
- if e.errno == errno.EAGAIN:
- sys.exit(0) # Another instance of this script is already running, exit silently
- else:
- sys.exit("Error %r opening lock %r" % (e.strerror, os.path.join(rpki.autoconf.RCYNIC_DIR, "data/lock")))
+ if e.errno == errno.EAGAIN:
+ sys.exit(0) # Another instance of this script is already running, exit silently
+ else:
+ sys.exit("Error %r opening lock %r" % (e.strerror, os.path.join(rpki.autoconf.RCYNIC_DIR, "data/lock")))
if args.chroot:
- run("/bin/rcynic", "-c", "/etc/rcynic.conf", chroot_this = True)
+ run("/bin/rcynic", "-c", "/etc/rcynic.conf", chroot_this = True)
else:
- run(os.path.join(rpki.autoconf.bindir, "rcynic"), "-c", os.path.join(rpki.autoconf.sysconfdir, "rcynic.conf"))
+ run(os.path.join(rpki.autoconf.bindir, "rcynic"), "-c", os.path.join(rpki.autoconf.sysconfdir, "rcynic.conf"))
run(os.path.join(rpki.autoconf.bindir, "rpki-rtr"),
"cronjob",
@@ -102,9 +102,9 @@ run(os.path.join(rpki.autoconf.bindir, "rpki-rtr"),
prog = os.path.join(rpki.autoconf.libexecdir, "rpkigui-rcynic")
if os.path.exists(prog):
- run(prog)
+ run(prog)
if rpki.autoconf.RCYNIC_HTML_DIR and os.path.exists(os.path.dirname(rpki.autoconf.RCYNIC_HTML_DIR)):
- run(os.path.join(rpki.autoconf.bindir, "rcynic-html"),
- os.path.join(rpki.autoconf.RCYNIC_DIR, "data/rcynic.xml"),
- rpki.autoconf.RCYNIC_HTML_DIR)
+ run(os.path.join(rpki.autoconf.bindir, "rcynic-html"),
+ os.path.join(rpki.autoconf.RCYNIC_DIR, "data/rcynic.xml"),
+ rpki.autoconf.RCYNIC_HTML_DIR)
diff --git a/rp/rcynic/rcynic-html b/rp/rcynic/rcynic-html
index ef566440..012bccad 100755
--- a/rp/rcynic/rcynic-html
+++ b/rp/rcynic/rcynic-html
@@ -32,361 +32,361 @@ import copy
import rpki.autoconf
try:
- from lxml.etree import (ElementTree, Element, SubElement, Comment)
+ from lxml.etree import (ElementTree, Element, SubElement, Comment)
except ImportError:
- from xml.etree.ElementTree import (ElementTree, Element, SubElement, Comment)
+ from xml.etree.ElementTree import (ElementTree, Element, SubElement, Comment)
session = None
args = None
def parse_options():
- global args
-
- parser = argparse.ArgumentParser(description = __doc__)
- parser.add_argument("--refresh", type = int, default = 1800,
- help = "refresh interval for generated HTML")
- parser.add_argument("--hide-problems", action = "store_true",
- help = "don't generate \"problems\" page")
- parser.add_argument("--hide-graphs", action = "store_true",
- help = "don't generate graphs")
- parser.add_argument("--hide-object-counts", action = "store_true",
- help = "don't display object counts")
- parser.add_argument("--dont-update-rrds", action = "store_true",
- help = "don't add new data to RRD databases")
- parser.add_argument("--png-height", type = int, default = 190,
- help = "height of PNG images")
- parser.add_argument("--png-width", type = int, default = 1350,
- help = "width of PNG images")
- parser.add_argument("--svg-height", type = int, default = 600,
- help = "height of SVG images")
- parser.add_argument("--svg-width", type = int, default = 1200,
- help = "width of SVG images")
- parser.add_argument("--eps-height", type = int, default = 0,
- help = "height of EPS images")
- parser.add_argument("--eps-width", type = int, default = 0,
- help = "width of EPS images")
- parser.add_argument("--rrdtool-binary", default = rpki.autoconf.RRDTOOL,
- help = "location of rrdtool binary")
- parser.add_argument("input_file", type = argparse.FileType("r"),
- help = "XML input file")
- parser.add_argument("output_directory",
- help = "output directory")
- args = parser.parse_args()
+ global args
+
+ parser = argparse.ArgumentParser(description = __doc__)
+ parser.add_argument("--refresh", type = int, default = 1800,
+ help = "refresh interval for generated HTML")
+ parser.add_argument("--hide-problems", action = "store_true",
+ help = "don't generate \"problems\" page")
+ parser.add_argument("--hide-graphs", action = "store_true",
+ help = "don't generate graphs")
+ parser.add_argument("--hide-object-counts", action = "store_true",
+ help = "don't display object counts")
+ parser.add_argument("--dont-update-rrds", action = "store_true",
+ help = "don't add new data to RRD databases")
+ parser.add_argument("--png-height", type = int, default = 190,
+ help = "height of PNG images")
+ parser.add_argument("--png-width", type = int, default = 1350,
+ help = "width of PNG images")
+ parser.add_argument("--svg-height", type = int, default = 600,
+ help = "height of SVG images")
+ parser.add_argument("--svg-width", type = int, default = 1200,
+ help = "width of SVG images")
+ parser.add_argument("--eps-height", type = int, default = 0,
+ help = "height of EPS images")
+ parser.add_argument("--eps-width", type = int, default = 0,
+ help = "width of EPS images")
+ parser.add_argument("--rrdtool-binary", default = rpki.autoconf.RRDTOOL,
+ help = "location of rrdtool binary")
+ parser.add_argument("input_file", type = argparse.FileType("r"),
+ help = "XML input file")
+ parser.add_argument("output_directory",
+ help = "output directory")
+ args = parser.parse_args()
def parse_utc(s):
- return int(time.mktime(time.strptime(s, "%Y-%m-%dT%H:%M:%SZ")))
+ return int(time.mktime(time.strptime(s, "%Y-%m-%dT%H:%M:%SZ")))
class Label(object):
- moods = ["bad", "warn", "good"]
+ moods = ["bad", "warn", "good"]
- def __init__(self, elt):
- self.code = elt.tag
- self.mood = elt.get("kind")
- self.text = elt.text.strip()
- self.count = 0
+ def __init__(self, elt):
+ self.code = elt.tag
+ self.mood = elt.get("kind")
+ self.text = elt.text.strip()
+ self.count = 0
- def get_count(self):
- return self.count
+ def get_count(self):
+ return self.count
- @property
- def sort_key(self):
- try:
- return self.moods.index(self.mood)
- except ValueError:
- return len(self.moods)
+ @property
+ def sort_key(self):
+ try:
+ return self.moods.index(self.mood)
+ except ValueError:
+ return len(self.moods)
class Validation_Status(object):
- def __init__(self, elt, label_map):
- self.uri = elt.text.strip()
- self.timestamp = elt.get("timestamp")
- self.generation = elt.get("generation")
- self.hostname = urlparse.urlparse(self.uri).hostname or "[None]"
- self.fn2 = os.path.splitext(self.uri)[1] or None if self.generation else None
- self.label = label_map[elt.get("status")]
+ def __init__(self, elt, label_map):
+ self.uri = elt.text.strip()
+ self.timestamp = elt.get("timestamp")
+ self.generation = elt.get("generation")
+ self.hostname = urlparse.urlparse(self.uri).hostname or "[None]"
+ self.fn2 = os.path.splitext(self.uri)[1] or None if self.generation else None
+ self.label = label_map[elt.get("status")]
- def sort_key(self):
- return (self.label.sort_key, self.timestamp, self.hostname, self.fn2, self.generation)
+ def sort_key(self):
+ return (self.label.sort_key, self.timestamp, self.hostname, self.fn2, self.generation)
- @property
- def code(self):
- return self.label.code
+ @property
+ def code(self):
+ return self.label.code
- @property
- def mood(self):
- return self.label.mood
+ @property
+ def mood(self):
+ return self.label.mood
- @property
- def accepted(self):
- return self.label.code == "object_accepted"
+ @property
+ def accepted(self):
+ return self.label.code == "object_accepted"
- @property
- def rejected(self):
- return self.label.code == "object_rejected"
+ @property
+ def rejected(self):
+ return self.label.code == "object_rejected"
- @property
- def is_current(self):
- return self.generation == "current"
+ @property
+ def is_current(self):
+ return self.generation == "current"
- @property
- def is_backup(self):
- return self.generation == "backup"
+ @property
+ def is_backup(self):
+ return self.generation == "backup"
- @property
- def is_problem(self):
- return self.label.mood != "good"
+ @property
+ def is_problem(self):
+ return self.label.mood != "good"
- @property
- def is_connection_problem(self):
- return self.label.mood != "good" and self.label.code.startswith("rsync_transfer_")
+ @property
+ def is_connection_problem(self):
+ return self.label.mood != "good" and self.label.code.startswith("rsync_transfer_")
- @property
- def is_object_problem(self):
- return self.label.mood != "good" and not self.label.code.startswith("rsync_transfer_")
+ @property
+ def is_object_problem(self):
+ return self.label.mood != "good" and not self.label.code.startswith("rsync_transfer_")
- @property
- def is_connection_detail(self):
- return self.label.code.startswith("rsync_transfer_")
+ @property
+ def is_connection_detail(self):
+ return self.label.code.startswith("rsync_transfer_")
- @property
- def is_object_detail(self):
- return not self.label.code.startswith("rsync_transfer_")
+ @property
+ def is_object_detail(self):
+ return not self.label.code.startswith("rsync_transfer_")
class Problem_Mixin(object):
- @property
- def connection_problems(self):
- result = [v for v in self.validation_status if v.is_connection_problem]
- result.sort(key = Validation_Status.sort_key)
- return result
+ @property
+ def connection_problems(self):
+ result = [v for v in self.validation_status if v.is_connection_problem]
+ result.sort(key = Validation_Status.sort_key)
+ return result
- @property
- def object_problems(self):
- result = [v for v in self.validation_status if v.is_object_problem]
- result.sort(key = Validation_Status.sort_key)
- return result
+ @property
+ def object_problems(self):
+ result = [v for v in self.validation_status if v.is_object_problem]
+ result.sort(key = Validation_Status.sort_key)
+ return result
class Host(Problem_Mixin):
- def __init__(self, hostname, timestamp):
- self.hostname = hostname
- self.timestamp = timestamp
- self.elapsed = 0
- self.connections = 0
- self.failures = 0
- self.uris = set()
- self.graph = None
- self.counters = {}
- self.totals = {}
- self.validation_status = []
-
- def add_connection(self, elt):
- self.elapsed += parse_utc(elt.get("finished")) - parse_utc(elt.get("started"))
- self.connections += 1
- if elt.get("error") is not None:
- self.failures += 1
-
- def add_validation_status(self, v):
- self.validation_status.append(v)
- if v.generation == "current":
- self.uris.add(v.uri)
- self.counters[(v.fn2, v.generation, v.label)] = self.get_counter(v.fn2, v.generation, v.label) + 1
- self.totals[v.label] = self.get_total(v.label) + 1
- v.label.count += 1
-
- def get_counter(self, fn2, generation, label):
- return self.counters.get((fn2, generation, label), 0)
-
- def get_total(self, label):
- return self.totals.get(label, 0)
-
- @property
- def failed(self):
- return 1 if self.failures > 0 else 0
-
- @property
- def objects(self):
- return len(self.uris)
-
- field_table = (("connections", "GAUGE"),
- ("objects", "GAUGE"),
- ("elapsed", "GAUGE"),
- ("failed", "ABSOLUTE"))
-
- rras = tuple("RRA:AVERAGE:0.5:%s:9600" % steps
- for steps in (1, 4, 24))
-
- @classmethod
- def field_ds_specifiers(cls, heartbeat = 24 * 60 * 60, minimum = 0, maximum = "U"):
- return ["DS:%s:%s:%s:%s:%s" % (field[0], field[1], heartbeat, minimum, maximum)
- for field in cls.field_table]
-
- @property
- def field_values(self):
- return tuple(str(getattr(self, field[0])) for field in self.field_table)
-
- @classmethod
- def field_defs(cls, filebase):
- return ["DEF:%s=%s.rrd:%s:AVERAGE" % (field[0], filebase, field[0])
- for field in cls.field_table]
-
- graph_opts = (
- "--vertical-label", "Sync time (seconds)",
- "--right-axis-label", "Objects (count)",
- "--lower-limit", "0",
- "--right-axis", "1:0",
- "--full-size-mode" )
-
- graph_cmds = (
-
- # Split elapsed into separate data sets, so we can color
- # differently to indicate how succesful transfer was. Intent is
- # that exactly one of these be defined for every value in elapsed.
-
- r"CDEF:success=failed,UNKN,elapsed,IF",
- r"CDEF:failure=connections,1,EQ,failed,*,elapsed,UNKN,IF",
- r"CDEF:partial=connections,1,NE,failed,*,elapsed,UNKN,IF",
-
- # Show connection timing first, as color-coded semi-transparent
- # areas with opaque borders. Intent is to make the colors stand
- # out, since they're a major health indicator. Transparency is
- # handled via an alpha channel (fourth octet of color code). We
- # draw this stuff first so that later lines can overwrite it.
-
- r"AREA:success#00FF0080:Sync time (success)",
- r"AREA:partial#FFA50080:Sync time (partial failure)",
- r"AREA:failure#FF000080:Sync time (total failure)",
-
- r"LINE1:success#00FF00", # Green
- r"LINE1:partial#FFA500", # Orange
- r"LINE1:failure#FF0000", # Red
-
- # Now show object counts, as a simple black line.
-
- r"LINE1:objects#000000:Objects", # Black
-
- # Add averages over period to chart legend.
-
- r"VDEF:avg_elapsed=elapsed,AVERAGE",
- r"VDEF:avg_connections=connections,AVERAGE",
- r"VDEF:avg_objects=objects,AVERAGE",
- r"COMMENT:\j",
- r"GPRINT:avg_elapsed:Average sync time (seconds)\: %5.2lf",
- r"GPRINT:avg_connections:Average connection count\: %5.2lf",
- r"GPRINT:avg_objects:Average object count\: %5.2lf" )
-
- graph_periods = (("week", "-1w"),
- ("month", "-31d"),
- ("year", "-1y"))
-
- def rrd_run(self, cmd):
- try:
- cmd = [str(i) for i in cmd]
- cmd.insert(0, args.rrdtool_binary)
- subprocess.check_call(cmd, stdout = open("/dev/null", "w"))
- except OSError, e:
- sys.exit("Problem running %s, perhaps you need to set --rrdtool-binary? (%s)" % (args.rrdtool_binary, e))
- except subprocess.CalledProcessError, e:
- sys.exit("Failure running %s: %s" % (args.rrdtool_binary, e))
-
- def rrd_update(self):
- filename = os.path.join(args.output_directory, self.hostname) + ".rrd"
- if not os.path.exists(filename):
- cmd = ["create", filename, "--start", self.timestamp - 1, "--step", "3600"]
- cmd.extend(self.field_ds_specifiers())
- cmd.extend(self.rras)
- self.rrd_run(cmd)
- self.rrd_run(["update", filename,
- "%s:%s" % (self.timestamp, ":".join(str(v) for v in self.field_values))])
-
- def rrd_graph(self, html):
- # pylint: disable=W0622
- filebase = os.path.join(args.output_directory, self.hostname)
- formats = [format for format in ("png", "svg", "eps")
- if getattr(args, format + "_width") and getattr(args, format + "_height")]
- for period, start in self.graph_periods:
- for format in formats:
- cmds = [ "graph", "%s_%s.%s" % (filebase, period, format),
- "--title", "%s last %s" % (self.hostname, period),
- "--start", start,
- "--width", getattr(args, format + "_width"),
- "--height", getattr(args, format + "_height"),
- "--imgformat", format.upper() ]
- cmds.extend(self.graph_opts)
- cmds.extend(self.field_defs(filebase))
- cmds.extend(self.graph_cmds)
- self.rrd_run(cmds)
- img = Element("img", src = "%s_%s.png" % (self.hostname, period),
- width = str(args.png_width),
- height = str(args.png_height))
- if self.graph is None:
- self.graph = copy.copy(img)
- html.BodyElement("h2").text = "%s over last %s" % (self.hostname, period)
- html.BodyElement("a", href = "%s_%s_svg.html" % (self.hostname, period)).append(img)
- html.BodyElement("br")
- svg_html = HTML("%s over last %s" % (self.hostname, period),
- "%s_%s_svg" % (self.hostname, period))
- svg_html.BodyElement("img", src = "%s_%s.svg" % (self.hostname, period))
- svg_html.close()
+ def __init__(self, hostname, timestamp):
+ self.hostname = hostname
+ self.timestamp = timestamp
+ self.elapsed = 0
+ self.connections = 0
+ self.failures = 0
+ self.uris = set()
+ self.graph = None
+ self.counters = {}
+ self.totals = {}
+ self.validation_status = []
+
+ def add_connection(self, elt):
+ self.elapsed += parse_utc(elt.get("finished")) - parse_utc(elt.get("started"))
+ self.connections += 1
+ if elt.get("error") is not None:
+ self.failures += 1
+
+ def add_validation_status(self, v):
+ self.validation_status.append(v)
+ if v.generation == "current":
+ self.uris.add(v.uri)
+ self.counters[(v.fn2, v.generation, v.label)] = self.get_counter(v.fn2, v.generation, v.label) + 1
+ self.totals[v.label] = self.get_total(v.label) + 1
+ v.label.count += 1
+
+ def get_counter(self, fn2, generation, label):
+ return self.counters.get((fn2, generation, label), 0)
+
+ def get_total(self, label):
+ return self.totals.get(label, 0)
+
+ @property
+ def failed(self):
+ return 1 if self.failures > 0 else 0
+
+ @property
+ def objects(self):
+ return len(self.uris)
+
+ field_table = (("connections", "GAUGE"),
+ ("objects", "GAUGE"),
+ ("elapsed", "GAUGE"),
+ ("failed", "ABSOLUTE"))
+
+ rras = tuple("RRA:AVERAGE:0.5:%s:9600" % steps
+ for steps in (1, 4, 24))
+
+ @classmethod
+ def field_ds_specifiers(cls, heartbeat = 24 * 60 * 60, minimum = 0, maximum = "U"):
+ return ["DS:%s:%s:%s:%s:%s" % (field[0], field[1], heartbeat, minimum, maximum)
+ for field in cls.field_table]
+
+ @property
+ def field_values(self):
+ return tuple(str(getattr(self, field[0])) for field in self.field_table)
+
+ @classmethod
+ def field_defs(cls, filebase):
+ return ["DEF:%s=%s.rrd:%s:AVERAGE" % (field[0], filebase, field[0])
+ for field in cls.field_table]
+
+ graph_opts = (
+ "--vertical-label", "Sync time (seconds)",
+ "--right-axis-label", "Objects (count)",
+ "--lower-limit", "0",
+ "--right-axis", "1:0",
+ "--full-size-mode" )
+
+ graph_cmds = (
+
+ # Split elapsed into separate data sets, so we can color
+ # differently to indicate how succesful transfer was. Intent is
+ # that exactly one of these be defined for every value in elapsed.
+
+ r"CDEF:success=failed,UNKN,elapsed,IF",
+ r"CDEF:failure=connections,1,EQ,failed,*,elapsed,UNKN,IF",
+ r"CDEF:partial=connections,1,NE,failed,*,elapsed,UNKN,IF",
+
+ # Show connection timing first, as color-coded semi-transparent
+ # areas with opaque borders. Intent is to make the colors stand
+ # out, since they're a major health indicator. Transparency is
+ # handled via an alpha channel (fourth octet of color code). We
+ # draw this stuff first so that later lines can overwrite it.
+
+ r"AREA:success#00FF0080:Sync time (success)",
+ r"AREA:partial#FFA50080:Sync time (partial failure)",
+ r"AREA:failure#FF000080:Sync time (total failure)",
+
+ r"LINE1:success#00FF00", # Green
+ r"LINE1:partial#FFA500", # Orange
+ r"LINE1:failure#FF0000", # Red
+
+ # Now show object counts, as a simple black line.
+
+ r"LINE1:objects#000000:Objects", # Black
+
+ # Add averages over period to chart legend.
+
+ r"VDEF:avg_elapsed=elapsed,AVERAGE",
+ r"VDEF:avg_connections=connections,AVERAGE",
+ r"VDEF:avg_objects=objects,AVERAGE",
+ r"COMMENT:\j",
+ r"GPRINT:avg_elapsed:Average sync time (seconds)\: %5.2lf",
+ r"GPRINT:avg_connections:Average connection count\: %5.2lf",
+ r"GPRINT:avg_objects:Average object count\: %5.2lf" )
+
+ graph_periods = (("week", "-1w"),
+ ("month", "-31d"),
+ ("year", "-1y"))
+
+ def rrd_run(self, cmd):
+ try:
+ cmd = [str(i) for i in cmd]
+ cmd.insert(0, args.rrdtool_binary)
+ subprocess.check_call(cmd, stdout = open("/dev/null", "w"))
+ except OSError, e:
+ sys.exit("Problem running %s, perhaps you need to set --rrdtool-binary? (%s)" % (args.rrdtool_binary, e))
+ except subprocess.CalledProcessError, e:
+ sys.exit("Failure running %s: %s" % (args.rrdtool_binary, e))
+
+ def rrd_update(self):
+ filename = os.path.join(args.output_directory, self.hostname) + ".rrd"
+ if not os.path.exists(filename):
+ cmd = ["create", filename, "--start", self.timestamp - 1, "--step", "3600"]
+ cmd.extend(self.field_ds_specifiers())
+ cmd.extend(self.rras)
+ self.rrd_run(cmd)
+ self.rrd_run(["update", filename,
+ "%s:%s" % (self.timestamp, ":".join(str(v) for v in self.field_values))])
+
+ def rrd_graph(self, html):
+ # pylint: disable=W0622
+ filebase = os.path.join(args.output_directory, self.hostname)
+ formats = [format for format in ("png", "svg", "eps")
+ if getattr(args, format + "_width") and getattr(args, format + "_height")]
+ for period, start in self.graph_periods:
+ for format in formats:
+ cmds = [ "graph", "%s_%s.%s" % (filebase, period, format),
+ "--title", "%s last %s" % (self.hostname, period),
+ "--start", start,
+ "--width", getattr(args, format + "_width"),
+ "--height", getattr(args, format + "_height"),
+ "--imgformat", format.upper() ]
+ cmds.extend(self.graph_opts)
+ cmds.extend(self.field_defs(filebase))
+ cmds.extend(self.graph_cmds)
+ self.rrd_run(cmds)
+ img = Element("img", src = "%s_%s.png" % (self.hostname, period),
+ width = str(args.png_width),
+ height = str(args.png_height))
+ if self.graph is None:
+ self.graph = copy.copy(img)
+ html.BodyElement("h2").text = "%s over last %s" % (self.hostname, period)
+ html.BodyElement("a", href = "%s_%s_svg.html" % (self.hostname, period)).append(img)
+ html.BodyElement("br")
+ svg_html = HTML("%s over last %s" % (self.hostname, period),
+ "%s_%s_svg" % (self.hostname, period))
+ svg_html.BodyElement("img", src = "%s_%s.svg" % (self.hostname, period))
+ svg_html.close()
class Session(Problem_Mixin):
- def __init__(self):
- self.hosts = {}
+ def __init__(self):
+ self.hosts = {}
- self.root = ElementTree(file = args.input_file).getroot()
+ self.root = ElementTree(file = args.input_file).getroot()
- self.rcynic_version = self.root.get("rcynic-version")
- self.rcynic_date = self.root.get("date")
- self.timestamp = parse_utc(self.rcynic_date)
+ self.rcynic_version = self.root.get("rcynic-version")
+ self.rcynic_date = self.root.get("date")
+ self.timestamp = parse_utc(self.rcynic_date)
- self.labels = [Label(elt) for elt in self.root.find("labels")]
- self.load_validation_status()
+ self.labels = [Label(elt) for elt in self.root.find("labels")]
+ self.load_validation_status()
- for elt in self.root.findall("rsync_history"):
- self.get_host(urlparse.urlparse(elt.text.strip()).hostname).add_connection(elt)
+ for elt in self.root.findall("rsync_history"):
+ self.get_host(urlparse.urlparse(elt.text.strip()).hostname).add_connection(elt)
- generations = set()
- fn2s = set()
+ generations = set()
+ fn2s = set()
- for v in self.validation_status:
- self.get_host(v.hostname).add_validation_status(v)
- generations.add(v.generation)
- fn2s.add(v.fn2)
+ for v in self.validation_status:
+ self.get_host(v.hostname).add_validation_status(v)
+ generations.add(v.generation)
+ fn2s.add(v.fn2)
- self.labels = [l for l in self.labels if l.count > 0]
+ self.labels = [l for l in self.labels if l.count > 0]
- self.hostnames = sorted(self.hosts)
- self.generations = sorted(generations)
- self.fn2s = sorted(fn2s)
+ self.hostnames = sorted(self.hosts)
+ self.generations = sorted(generations)
+ self.fn2s = sorted(fn2s)
- def load_validation_status(self):
- label_map = dict((label.code, label) for label in self.labels)
- full_validation_status = [Validation_Status(elt, label_map)
- for elt in self.root.findall("validation_status")]
- accepted_current = set(v.uri for v in full_validation_status
- if v.is_current and v.accepted)
- self.validation_status = [v for v in full_validation_status
- if not v.is_backup
- or v.uri not in accepted_current]
+ def load_validation_status(self):
+ label_map = dict((label.code, label) for label in self.labels)
+ full_validation_status = [Validation_Status(elt, label_map)
+ for elt in self.root.findall("validation_status")]
+ accepted_current = set(v.uri for v in full_validation_status
+ if v.is_current and v.accepted)
+ self.validation_status = [v for v in full_validation_status
+ if not v.is_backup
+ or v.uri not in accepted_current]
- def get_host(self, hostname):
- if hostname not in self.hosts:
- self.hosts[hostname] = Host(hostname, self.timestamp)
- return self.hosts[hostname]
+ def get_host(self, hostname):
+ if hostname not in self.hosts:
+ self.hosts[hostname] = Host(hostname, self.timestamp)
+ return self.hosts[hostname]
- def get_sum(self, fn2, generation, label):
- return sum(h.get_counter(fn2, generation, label)
- for h in self.hosts.itervalues())
+ def get_sum(self, fn2, generation, label):
+ return sum(h.get_counter(fn2, generation, label)
+ for h in self.hosts.itervalues())
- def rrd_update(self):
- if not args.dont_update_rrds:
- for h in self.hosts.itervalues():
- h.rrd_update()
+ def rrd_update(self):
+ if not args.dont_update_rrds:
+ for h in self.hosts.itervalues():
+ h.rrd_update()
css = '''
th, td {
@@ -475,183 +475,183 @@ css = '''
class HTML(object):
- def __init__(self, title, filebase):
+ def __init__(self, title, filebase):
+
+ self.filename = os.path.join(args.output_directory, filebase + ".html")
+
+ self.html = Element("html")
+ self.html.append(Comment(" Generators:\n" +
+ " " + session.rcynic_version + "\n" +
+ " $Id$\n"))
+ self.head = SubElement(self.html, "head")
+ self.body = SubElement(self.html, "body")
+
+ title += " " + session.rcynic_date
+ SubElement(self.head, "title").text = title
+ SubElement(self.body, "h1").text = title
+ SubElement(self.head, "style", type = "text/css").text = css
+
+ if args.refresh:
+ SubElement(self.head, "meta", { "http-equiv" : "Refresh", "content" : str(args.refresh) })
+
+ hostwidth = max(len(hostname) for hostname in session.hostnames)
+
+ toc = SubElement(self.body, "ul", id = "nav")
+ SubElement(SubElement(toc, "li"), "a", href = "index.html").text = "Overview"
+ li = SubElement(toc, "li")
+ SubElement(li, "span").text = "Repositories"
+ ul = SubElement(li, "ul", style = "width: %sem" % hostwidth)
+ for hostname in session.hostnames:
+ SubElement(SubElement(ul, "li"), "a", href = "%s.html" % hostname).text = hostname
+ SubElement(SubElement(toc, "li"), "a", href = "problems.html").text = "Problems"
+ li = SubElement(toc, "li")
+ SubElement(li, "span").text = "All Details"
+ ul = SubElement(li, "ul", style = "width: 15em")
+ SubElement(SubElement(ul, "li"), "a", href = "connections.html").text = "All Connections"
+ SubElement(SubElement(ul, "li"), "a", href = "objects.html").text = "All Objects"
+ SubElement(self.body, "br")
+
+ def close(self):
+ ElementTree(element = self.html).write(self.filename)
+
+ def BodyElement(self, tag, **attrib):
+ return SubElement(self.body, tag, **attrib)
+
+ def counter_table(self, data_func, total_func):
+ table = self.BodyElement("table", rules = "all", border = "1")
+ thead = SubElement(table, "thead")
+ tfoot = SubElement(table, "tfoot")
+ tbody = SubElement(table, "tbody")
+ tr = SubElement(thead, "tr")
+ SubElement(tr, "th")
+ for label in session.labels:
+ SubElement(tr, "th").text = label.text
+ for fn2 in session.fn2s:
+ for generation in session.generations:
+ counters = [data_func(fn2, generation, label) for label in session.labels]
+ if sum(counters) > 0:
+ tr = SubElement(tbody, "tr")
+ SubElement(tr, "td").text = ((generation or "") + " " + (fn2 or "")).strip()
+ for label, count in zip(session.labels, counters):
+ td = SubElement(tr, "td")
+ if count > 0:
+ td.set("class", label.mood)
+ td.text = str(count)
+ tr = SubElement(tfoot, "tr")
+ SubElement(tr, "td").text = "Total"
+ counters = [total_func(label) for label in session.labels]
+ for label, count in zip(session.labels, counters):
+ td = SubElement(tr, "td")
+ if count > 0:
+ td.set("class", label.mood)
+ td.text = str(count)
+ return table
+
+ def object_count_table(self, session): # pylint: disable=W0621
+ table = self.BodyElement("table", rules = "all", border = "1")
+ thead = SubElement(table, "thead")
+ tbody = SubElement(table, "tbody")
+ tfoot = SubElement(table, "tfoot")
+ fn2s = [fn2 for fn2 in session.fn2s if fn2 is not None]
+ total = dict((fn2, 0) for fn2 in fn2s)
+ for hostname in session.hostnames:
+ tr = SubElement(tbody, "tr")
+ SubElement(tr, "td").text = hostname
+ for fn2 in fn2s:
+ td = SubElement(tr, "td")
+ count = sum(uri.endswith(fn2) for uri in session.hosts[hostname].uris)
+ total[fn2] += count
+ if count > 0:
+ td.text = str(count)
+ trhead = SubElement(thead, "tr")
+ trfoot = SubElement(tfoot, "tr")
+ SubElement(trhead, "th").text = "Repository"
+ SubElement(trfoot, "td").text = "Total"
+ for fn2 in fn2s:
+ SubElement(trhead, "th").text = fn2
+ SubElement(trfoot, "td").text = str(total[fn2])
+ return table
+
+ def detail_table(self, records):
+ if records:
+ table = self.BodyElement("table", rules = "all", border = "1")
+ thead = SubElement(table, "thead")
+ tbody = SubElement(table, "tbody")
+ tr = SubElement(thead, "tr")
+ SubElement(tr, "th").text = "Timestamp"
+ SubElement(tr, "th").text = "Generation"
+ SubElement(tr, "th").text = "Status"
+ SubElement(tr, "th").text = "URI"
+ for v in records:
+ tr = SubElement(tbody, "tr", { "class" : v.mood })
+ SubElement(tr, "td").text = v.timestamp
+ SubElement(tr, "td").text = v.generation
+ SubElement(tr, "td").text = v.label.text
+ SubElement(tr, "td", { "class" : "uri"}).text = v.uri
+ return table
+ else:
+ self.BodyElement("p").text = "None found"
+ return None
- self.filename = os.path.join(args.output_directory, filebase + ".html")
+def main():
- self.html = Element("html")
- self.html.append(Comment(" Generators:\n" +
- " " + session.rcynic_version + "\n" +
- " $Id$\n"))
- self.head = SubElement(self.html, "head")
- self.body = SubElement(self.html, "body")
+ global session
- title += " " + session.rcynic_date
- SubElement(self.head, "title").text = title
- SubElement(self.body, "h1").text = title
- SubElement(self.head, "style", type = "text/css").text = css
+ os.putenv("TZ", "UTC")
+ time.tzset()
- if args.refresh:
- SubElement(self.head, "meta", { "http-equiv" : "Refresh", "content" : str(args.refresh) })
+ parse_options()
- hostwidth = max(len(hostname) for hostname in session.hostnames)
+ session = Session()
+ session.rrd_update()
- toc = SubElement(self.body, "ul", id = "nav")
- SubElement(SubElement(toc, "li"), "a", href = "index.html").text = "Overview"
- li = SubElement(toc, "li")
- SubElement(li, "span").text = "Repositories"
- ul = SubElement(li, "ul", style = "width: %sem" % hostwidth)
for hostname in session.hostnames:
- SubElement(SubElement(ul, "li"), "a", href = "%s.html" % hostname).text = hostname
- SubElement(SubElement(toc, "li"), "a", href = "problems.html").text = "Problems"
- li = SubElement(toc, "li")
- SubElement(li, "span").text = "All Details"
- ul = SubElement(li, "ul", style = "width: 15em")
- SubElement(SubElement(ul, "li"), "a", href = "connections.html").text = "All Connections"
- SubElement(SubElement(ul, "li"), "a", href = "objects.html").text = "All Objects"
- SubElement(self.body, "br")
-
- def close(self):
- ElementTree(element = self.html).write(self.filename)
-
- def BodyElement(self, tag, **attrib):
- return SubElement(self.body, tag, **attrib)
-
- def counter_table(self, data_func, total_func):
- table = self.BodyElement("table", rules = "all", border = "1")
- thead = SubElement(table, "thead")
- tfoot = SubElement(table, "tfoot")
- tbody = SubElement(table, "tbody")
- tr = SubElement(thead, "tr")
- SubElement(tr, "th")
- for label in session.labels:
- SubElement(tr, "th").text = label.text
- for fn2 in session.fn2s:
- for generation in session.generations:
- counters = [data_func(fn2, generation, label) for label in session.labels]
- if sum(counters) > 0:
- tr = SubElement(tbody, "tr")
- SubElement(tr, "td").text = ((generation or "") + " " + (fn2 or "")).strip()
- for label, count in zip(session.labels, counters):
- td = SubElement(tr, "td")
- if count > 0:
- td.set("class", label.mood)
- td.text = str(count)
- tr = SubElement(tfoot, "tr")
- SubElement(tr, "td").text = "Total"
- counters = [total_func(label) for label in session.labels]
- for label, count in zip(session.labels, counters):
- td = SubElement(tr, "td")
- if count > 0:
- td.set("class", label.mood)
- td.text = str(count)
- return table
-
- def object_count_table(self, session): # pylint: disable=W0621
- table = self.BodyElement("table", rules = "all", border = "1")
- thead = SubElement(table, "thead")
- tbody = SubElement(table, "tbody")
- tfoot = SubElement(table, "tfoot")
- fn2s = [fn2 for fn2 in session.fn2s if fn2 is not None]
- total = dict((fn2, 0) for fn2 in fn2s)
+ html = HTML("Repository details for %s" % hostname, hostname)
+ html.counter_table(session.hosts[hostname].get_counter, session.hosts[hostname].get_total)
+ if not args.hide_graphs:
+ session.hosts[hostname].rrd_graph(html)
+ if not args.hide_problems:
+ html.BodyElement("h2").text = "Connection Problems"
+ html.detail_table(session.hosts[hostname].connection_problems)
+ html.BodyElement("h2").text = "Object Problems"
+ html.detail_table(session.hosts[hostname].object_problems)
+ html.close()
+
+ html = HTML("rcynic summary", "index")
+ html.BodyElement("h2").text = "Grand totals for all repositories"
+ html.counter_table(session.get_sum, Label.get_count)
+ if not args.hide_object_counts:
+ html.BodyElement("br")
+ html.BodyElement("hr")
+ html.BodyElement("br")
+ html.BodyElement("h2").text = "Current total object counts (distinct URIs)"
+ html.object_count_table(session)
for hostname in session.hostnames:
- tr = SubElement(tbody, "tr")
- SubElement(tr, "td").text = hostname
- for fn2 in fn2s:
- td = SubElement(tr, "td")
- count = sum(uri.endswith(fn2) for uri in session.hosts[hostname].uris)
- total[fn2] += count
- if count > 0:
- td.text = str(count)
- trhead = SubElement(thead, "tr")
- trfoot = SubElement(tfoot, "tr")
- SubElement(trhead, "th").text = "Repository"
- SubElement(trfoot, "td").text = "Total"
- for fn2 in fn2s:
- SubElement(trhead, "th").text = fn2
- SubElement(trfoot, "td").text = str(total[fn2])
- return table
-
- def detail_table(self, records):
- if records:
- table = self.BodyElement("table", rules = "all", border = "1")
- thead = SubElement(table, "thead")
- tbody = SubElement(table, "tbody")
- tr = SubElement(thead, "tr")
- SubElement(tr, "th").text = "Timestamp"
- SubElement(tr, "th").text = "Generation"
- SubElement(tr, "th").text = "Status"
- SubElement(tr, "th").text = "URI"
- for v in records:
- tr = SubElement(tbody, "tr", { "class" : v.mood })
- SubElement(tr, "td").text = v.timestamp
- SubElement(tr, "td").text = v.generation
- SubElement(tr, "td").text = v.label.text
- SubElement(tr, "td", { "class" : "uri"}).text = v.uri
- return table
- else:
- self.BodyElement("p").text = "None found"
- return None
-
-def main():
-
- global session
-
- os.putenv("TZ", "UTC")
- time.tzset()
-
- parse_options()
+ html.BodyElement("br")
+ html.BodyElement("hr")
+ html.BodyElement("br")
+ html.BodyElement("h2").text = "Overview for repository %s" % hostname
+ html.counter_table(session.hosts[hostname].get_counter, session.hosts[hostname].get_total)
+ if not args.hide_graphs:
+ html.BodyElement("br")
+ html.BodyElement("a", href = "%s.html" % hostname).append(session.hosts[hostname].graph)
+ html.close()
- session = Session()
- session.rrd_update()
+ html = HTML("Problems", "problems")
+ html.BodyElement("h2").text = "Connection Problems"
+ html.detail_table(session.connection_problems)
+ html.BodyElement("h2").text = "Object Problems"
+ html.detail_table(session.object_problems)
+ html.close()
- for hostname in session.hostnames:
- html = HTML("Repository details for %s" % hostname, hostname)
- html.counter_table(session.hosts[hostname].get_counter, session.hosts[hostname].get_total)
- if not args.hide_graphs:
- session.hosts[hostname].rrd_graph(html)
- if not args.hide_problems:
- html.BodyElement("h2").text = "Connection Problems"
- html.detail_table(session.hosts[hostname].connection_problems)
- html.BodyElement("h2").text = "Object Problems"
- html.detail_table(session.hosts[hostname].object_problems)
+ html = HTML("All connections", "connections")
+ html.detail_table([v for v in session.validation_status if v.is_connection_detail])
html.close()
- html = HTML("rcynic summary", "index")
- html.BodyElement("h2").text = "Grand totals for all repositories"
- html.counter_table(session.get_sum, Label.get_count)
- if not args.hide_object_counts:
- html.BodyElement("br")
- html.BodyElement("hr")
- html.BodyElement("br")
- html.BodyElement("h2").text = "Current total object counts (distinct URIs)"
- html.object_count_table(session)
- for hostname in session.hostnames:
- html.BodyElement("br")
- html.BodyElement("hr")
- html.BodyElement("br")
- html.BodyElement("h2").text = "Overview for repository %s" % hostname
- html.counter_table(session.hosts[hostname].get_counter, session.hosts[hostname].get_total)
- if not args.hide_graphs:
- html.BodyElement("br")
- html.BodyElement("a", href = "%s.html" % hostname).append(session.hosts[hostname].graph)
- html.close()
-
- html = HTML("Problems", "problems")
- html.BodyElement("h2").text = "Connection Problems"
- html.detail_table(session.connection_problems)
- html.BodyElement("h2").text = "Object Problems"
- html.detail_table(session.object_problems)
- html.close()
-
- html = HTML("All connections", "connections")
- html.detail_table([v for v in session.validation_status if v.is_connection_detail])
- html.close()
-
- html = HTML("All objects", "objects")
- html.detail_table([v for v in session.validation_status if v.is_object_detail])
- html.close()
+ html = HTML("All objects", "objects")
+ html.detail_table([v for v in session.validation_status if v.is_object_detail])
+ html.close()
if __name__ == "__main__":
- main()
+ main()
diff --git a/rp/rcynic/rcynic-svn b/rp/rcynic/rcynic-svn
index 28b24672..a9417d8d 100755
--- a/rp/rcynic/rcynic-svn
+++ b/rp/rcynic/rcynic-svn
@@ -27,50 +27,50 @@ import fcntl
import os
try:
- from lxml.etree import ElementTree
+ from lxml.etree import ElementTree
except ImportError:
- from xml.etree.ElementTree import ElementTree
+ from xml.etree.ElementTree import ElementTree
mime_types = (
- ("html", "application/xhtml+xml"),
- ("cer", "application/pkix-cert"),
- ("crl", "application/pkix-crl"),
- ("mft", "application/rpki-manifest"),
- ("mnf", "application/rpki-manifest"),
- ("roa", "application/rpki-roa"),
- ("gbr", "application/rpki-ghostbusters"))
+ ("html", "application/xhtml+xml"),
+ ("cer", "application/pkix-cert"),
+ ("crl", "application/pkix-crl"),
+ ("mft", "application/rpki-manifest"),
+ ("mnf", "application/rpki-manifest"),
+ ("roa", "application/rpki-roa"),
+ ("gbr", "application/rpki-ghostbusters"))
def run(*argv, **kwargs):
- """
- Run a program, displaying timing data when appropriate.
- """
+ """
+ Run a program, displaying timing data when appropriate.
+ """
- _t0 = datetime.datetime.utcnow()
- subprocess.check_call(argv, **kwargs)
- if args.show_timing:
- _t1 = datetime.datetime.utcnow()
- print _t1, (_t1 - _t0), " ".join(argv)
+ _t0 = datetime.datetime.utcnow()
+ subprocess.check_call(argv, **kwargs)
+ if args.show_timing:
+ _t1 = datetime.datetime.utcnow()
+ print _t1, (_t1 - _t0), " ".join(argv)
def runxml(*argv):
- """
-
- Run a program which produces XML output, displaying timing data when
- appropriate and returning an ElementTree constructed from the
- program's output.
- """
- _t0 = datetime.datetime.utcnow()
- p = subprocess.Popen(argv, stdout = subprocess.PIPE)
- x = ElementTree(file = p.stdout)
- s = p.wait()
- if s:
- raise subprocess.CalledProcessError(s, argv[0])
- if args.show_timing:
- _t1 = datetime.datetime.utcnow()
- print _t1, (_t1 - _t0), " ".join(argv)
- return x
+ """
+
+ Run a program which produces XML output, displaying timing data when
+ appropriate and returning an ElementTree constructed from the
+ program's output.
+ """
+ _t0 = datetime.datetime.utcnow()
+ p = subprocess.Popen(argv, stdout = subprocess.PIPE)
+ x = ElementTree(file = p.stdout)
+ s = p.wait()
+ if s:
+ raise subprocess.CalledProcessError(s, argv[0])
+ if args.show_timing:
+ _t1 = datetime.datetime.utcnow()
+ print _t1, (_t1 - _t0), " ".join(argv)
+ return x
# Main program.
@@ -120,8 +120,8 @@ parser.add_argument("working_directory", help = \
args = parser.parse_args()
if args.show_timing:
- t0 = datetime.datetime.utcnow()
- print t0, "Starting"
+ t0 = datetime.datetime.utcnow()
+ print t0, "Starting"
# Lock out other instances of this program. We may want some more
# sophsiticated approach when combining this with other programs, but
@@ -141,18 +141,18 @@ run("svn", "update", "--quiet", args.working_directory)
if args.files_to_archive:
- if args.verbatim:
- cmd = ["rsync", "--archive", "--quiet", "--delete"]
- cmd.extend(args.files_to_archive)
- cmd.append(args.working_directory)
- run(*cmd)
+ if args.verbatim:
+ cmd = ["rsync", "--archive", "--quiet", "--delete"]
+ cmd.extend(args.files_to_archive)
+ cmd.append(args.working_directory)
+ run(*cmd)
- else:
- for src in args.files_to_archive:
- cmd = ["rsync", "--archive", "--quiet", "--delete", "--copy-links"]
- cmd.append(src.rstrip("/"))
- cmd.append(args.working_directory.rstrip("/") + "/")
- run(*cmd)
+ else:
+ for src in args.files_to_archive:
+ cmd = ["rsync", "--archive", "--quiet", "--delete", "--copy-links"]
+ cmd.append(src.rstrip("/"))
+ cmd.append(args.working_directory.rstrip("/") + "/")
+ run(*cmd)
# Ask Subversion to add any new files, trying hard to get the MIME
# types right.
@@ -160,8 +160,8 @@ if args.files_to_archive:
cmd = ["svn", "add", "--quiet", "--force", "--auto-props"]
for fn2, mime_type in mime_types:
- cmd.append("--config-option")
- cmd.append("config:auto-props:*.%s=svn:mime-type=%s" % (fn2, mime_type))
+ cmd.append("--config-option")
+ cmd.append("config:auto-props:*.%s=svn:mime-type=%s" % (fn2, mime_type))
cmd.append(".")
@@ -177,9 +177,9 @@ missing = sorted(entry.get("path")
deleted = []
for path in missing:
- if not any(path.startswith(r) for r in deleted):
- run("svn", "delete", "--quiet", path)
- deleted.append(path + "/")
+ if not any(path.startswith(r) for r in deleted):
+ run("svn", "delete", "--quiet", path)
+ deleted.append(path + "/")
# Commit our changes and update the working tree.
@@ -187,5 +187,5 @@ run("svn", "commit", "--quiet", "--message", "Auto update.", args.working_direct
run("svn", "update", "--quiet", args.working_directory)
if args.show_timing:
- t1 = datetime.datetime.utcnow()
- print t1, t1 - t0, "total runtime"
+ t1 = datetime.datetime.utcnow()
+ print t1, t1 - t0, "total runtime"
diff --git a/rp/rcynic/rcynic-text b/rp/rcynic/rcynic-text
index db4126ce..d4a5b23e 100755
--- a/rp/rcynic/rcynic-text
+++ b/rp/rcynic/rcynic-text
@@ -25,96 +25,96 @@ import urlparse
import textwrap
try:
- from lxml.etree import ElementTree
+ from lxml.etree import ElementTree
except ImportError:
- from xml.etree.ElementTree import ElementTree
+ from xml.etree.ElementTree import ElementTree
class Label(object):
- def __init__(self, elt):
- self.tag = elt.tag
- self.width = max(len(s) for s in elt.text.split())
- self.lines = textwrap.wrap(elt.text.strip(), width = self.width)
- self.counter = 0
+ def __init__(self, elt):
+ self.tag = elt.tag
+ self.width = max(len(s) for s in elt.text.split())
+ self.lines = textwrap.wrap(elt.text.strip(), width = self.width)
+ self.counter = 0
- def line(self, n):
- try:
- return " " + self.lines[n].center(self.width) + " "
- except IndexError:
- return " " * (self.width + 2)
+ def line(self, n):
+ try:
+ return " " + self.lines[n].center(self.width) + " "
+ except IndexError:
+ return " " * (self.width + 2)
- def add(self):
- self.counter += 1
+ def add(self):
+ self.counter += 1
- @property
- def total(self):
- return " " + str(self.counter).rjust(self.width) + " "
+ @property
+ def total(self):
+ return " " + str(self.counter).rjust(self.width) + " "
- @property
- def visible(self):
- return self.counter > 0
+ @property
+ def visible(self):
+ return self.counter > 0
class Host(object):
- def __init__(self):
- self.counters = {}
+ def __init__(self):
+ self.counters = {}
- def add(self, label):
- self.counters[label] = self.counters.get(label, 0) + 1
- label.add()
+ def add(self, label):
+ self.counters[label] = self.counters.get(label, 0) + 1
+ label.add()
- def total(self, label):
- if label in self.counters:
- return " " + str(self.counters[label]).rjust(label.width) + " "
- else:
- return " " * (label.width + 2)
+ def total(self, label):
+ if label in self.counters:
+ return " " + str(self.counters[label]).rjust(label.width) + " "
+ else:
+ return " " * (label.width + 2)
class Session(object):
- def __init__(self, labels):
- self.hosts = {}
- self.labels = labels
- self.map = dict((label.tag, label) for label in labels)
-
- def add(self, elt):
- label = self.map[elt.get("status")]
- hostname = urlparse.urlparse(elt.text.strip()).hostname
- if hostname not in self.hosts:
- self.hosts[hostname] = Host()
- self.hosts[hostname].add(label)
-
- def show(self):
- visible = [label for label in self.labels if label.visible]
- hostnames = sorted(hostname for hostname in self.hosts if hostname is not None)
- hostwidth = max(len(hostname) for hostname in hostnames + ["Hostname"])
- separator = "+-%s-+-%s-+" % (
- "-" * hostwidth,
- "-+-".join("-" * label.width for label in visible))
- print separator
- for i in xrange(max(len(label.lines) for label in visible)):
- print "| %s |%s|" % (
- ("Hostname" if i == 0 else "").ljust(hostwidth),
- "|".join(label.line(i) for label in visible))
- print separator
- for hostname in hostnames:
- print "| %s |%s|" % (
- hostname.ljust(hostwidth),
- "|".join(self.hosts[hostname].total(label) for label in visible))
- if hostnames:
- print separator
- print "| %s |%s|" % (
- "Total".ljust(hostwidth),
- "|".join(label.total for label in visible))
- print separator
+ def __init__(self, labels):
+ self.hosts = {}
+ self.labels = labels
+ self.map = dict((label.tag, label) for label in labels)
+
+ def add(self, elt):
+ label = self.map[elt.get("status")]
+ hostname = urlparse.urlparse(elt.text.strip()).hostname
+ if hostname not in self.hosts:
+ self.hosts[hostname] = Host()
+ self.hosts[hostname].add(label)
+
+ def show(self):
+ visible = [label for label in self.labels if label.visible]
+ hostnames = sorted(hostname for hostname in self.hosts if hostname is not None)
+ hostwidth = max(len(hostname) for hostname in hostnames + ["Hostname"])
+ separator = "+-%s-+-%s-+" % (
+ "-" * hostwidth,
+ "-+-".join("-" * label.width for label in visible))
+ print separator
+ for i in xrange(max(len(label.lines) for label in visible)):
+ print "| %s |%s|" % (
+ ("Hostname" if i == 0 else "").ljust(hostwidth),
+ "|".join(label.line(i) for label in visible))
+ print separator
+ for hostname in hostnames:
+ print "| %s |%s|" % (
+ hostname.ljust(hostwidth),
+ "|".join(self.hosts[hostname].total(label) for label in visible))
+ if hostnames:
+ print separator
+ print "| %s |%s|" % (
+ "Total".ljust(hostwidth),
+ "|".join(label.total for label in visible))
+ print separator
def main():
- for filename in ([sys.stdin] if len(sys.argv) < 2 else sys.argv[1:]):
- etree = ElementTree(file = filename)
- session = Session([Label(elt) for elt in etree.find("labels")])
- for elt in etree.findall("validation_status"):
- session.add(elt)
- session.show()
+ for filename in ([sys.stdin] if len(sys.argv) < 2 else sys.argv[1:]):
+ etree = ElementTree(file = filename)
+ session = Session([Label(elt) for elt in etree.find("labels")])
+ for elt in etree.findall("validation_status"):
+ session.add(elt)
+ session.show()
if __name__ == "__main__":
- main()
+ main()
diff --git a/rp/rcynic/rpki-torrent.py b/rp/rcynic/rpki-torrent.py
index 2c6aa64d..f9a3d620 100644
--- a/rp/rcynic/rpki-torrent.py
+++ b/rp/rcynic/rpki-torrent.py
@@ -46,688 +46,688 @@ import transmissionrpc
tr_env_vars = ("TR_TORRENT_DIR", "TR_TORRENT_ID", "TR_TORRENT_NAME")
class WrongServer(Exception):
- "Hostname not in X.509v3 subjectAltName extension."
+ "Hostname not in X.509v3 subjectAltName extension."
class UnexpectedRedirect(Exception):
- "Unexpected HTTP redirect."
+ "Unexpected HTTP redirect."
class WrongMode(Exception):
- "Wrong operation for mode."
+ "Wrong operation for mode."
class BadFormat(Exception):
- "Zip file does not match our expectations."
+ "Zip file does not match our expectations."
class InconsistentEnvironment(Exception):
- "Environment variables received from Transmission aren't consistent."
+ "Environment variables received from Transmission aren't consistent."
class TorrentNotReady(Exception):
- "Torrent is not ready for checking."
+ "Torrent is not ready for checking."
class TorrentDoesNotMatchManifest(Exception):
- "Retrieved torrent does not match manifest."
+ "Retrieved torrent does not match manifest."
class TorrentNameDoesNotMatchURL(Exception):
- "Torrent name doesn't uniquely match a URL."
+ "Torrent name doesn't uniquely match a URL."
class CouldNotFindTorrents(Exception):
- "Could not find torrent(s) with given name(s)."
+ "Could not find torrent(s) with given name(s)."
class UseTheSourceLuke(Exception):
- "Use The Source, Luke."
+ "Use The Source, Luke."
cfg = None
def main():
- try:
- syslog_flags = syslog.LOG_PID
- if os.isatty(sys.stderr.fileno()):
- syslog_flags |= syslog.LOG_PERROR
- syslog.openlog("rpki-torrent", syslog_flags)
-
- # If I seriously expected this script to get a lot of further use,
- # I might rewrite this using subparsers, but it'd be a bit tricky
- # as argparse doesn't support making the subparser argument
- # optional and transmission gives no sane way to provide arguments
- # when running a completion script. So, for the moment, let's
- # just fix the bugs accidently introduced while converting the
- # universe to argparse without making any radical changes to the
- # program structure here, even if the result looks kind of klunky.
-
- parser = argparse.ArgumentParser(description = __doc__)
- parser.add_argument("-c", "--config",
- help = "configuration file")
- parser.add_argument("action", choices = ("poll", "generate", "mirror"), nargs = "?",
- help = "action to take")
- args = parser.parse_args()
-
- global cfg
- cfg = MyConfigParser()
- cfg.read(args.config or
- [os.path.join(dn, fn)
- for fn in ("rcynic.conf", "rpki.conf")
- for dn in ("/var/rcynic/etc", "/usr/local/etc", "/etc")])
-
- if cfg.act_as_generator:
- if args.action == "generate":
- generator_main()
- elif args.action == "mirror":
- mirror_main()
- else:
- raise UseTheSourceLuke
- else:
- if args.action is None and all(v in os.environ for v in tr_env_vars):
- torrent_completion_main()
- elif args.action == "poll":
- poll_main()
- else:
- raise UseTheSourceLuke
-
- except:
- for line in traceback.format_exc().splitlines():
- syslog.syslog(line)
- sys.exit(1)
+ try:
+ syslog_flags = syslog.LOG_PID
+ if os.isatty(sys.stderr.fileno()):
+ syslog_flags |= syslog.LOG_PERROR
+ syslog.openlog("rpki-torrent", syslog_flags)
+
+ # If I seriously expected this script to get a lot of further use,
+ # I might rewrite this using subparsers, but it'd be a bit tricky
+ # as argparse doesn't support making the subparser argument
+ # optional and transmission gives no sane way to provide arguments
+ # when running a completion script. So, for the moment, let's
+ # just fix the bugs accidently introduced while converting the
+ # universe to argparse without making any radical changes to the
+ # program structure here, even if the result looks kind of klunky.
+
+ parser = argparse.ArgumentParser(description = __doc__)
+ parser.add_argument("-c", "--config",
+ help = "configuration file")
+ parser.add_argument("action", choices = ("poll", "generate", "mirror"), nargs = "?",
+ help = "action to take")
+ args = parser.parse_args()
+
+ global cfg
+ cfg = MyConfigParser()
+ cfg.read(args.config or
+ [os.path.join(dn, fn)
+ for fn in ("rcynic.conf", "rpki.conf")
+ for dn in ("/var/rcynic/etc", "/usr/local/etc", "/etc")])
+
+ if cfg.act_as_generator:
+ if args.action == "generate":
+ generator_main()
+ elif args.action == "mirror":
+ mirror_main()
+ else:
+ raise UseTheSourceLuke
+ else:
+ if args.action is None and all(v in os.environ for v in tr_env_vars):
+ torrent_completion_main()
+ elif args.action == "poll":
+ poll_main()
+ else:
+ raise UseTheSourceLuke
+
+ except:
+ for line in traceback.format_exc().splitlines():
+ syslog.syslog(line)
+ sys.exit(1)
def generator_main():
- import paramiko
-
- class SFTPClient(paramiko.SFTPClient):
- def atomic_rename(self, oldpath, newpath):
- oldpath = self._adjust_cwd(oldpath)
- newpath = self._adjust_cwd(newpath)
- self._log(paramiko.common.DEBUG, 'atomic_rename(%r, %r)' % (oldpath, newpath))
- self._request(paramiko.sftp.CMD_EXTENDED, "posix-rename@openssh.com", oldpath, newpath)
-
- z = ZipFile(url = cfg.generate_url, dn = cfg.zip_dir)
- client = TransmissionClient()
-
- client.remove_torrents(z.torrent_name)
-
- download_dir = client.get_session().download_dir
- torrent_dir = os.path.join(download_dir, z.torrent_name)
- torrent_file = os.path.join(cfg.zip_dir, z.torrent_name + ".torrent")
-
-
- syslog.syslog("Synchronizing local data from %s to %s" % (cfg.unauthenticated, torrent_dir))
- subprocess.check_call((cfg.rsync_prog, "--archive", "--delete",
- os.path.normpath(cfg.unauthenticated) + "/",
- os.path.normpath(torrent_dir) + "/"))
-
- syslog.syslog("Creating %s" % torrent_file)
- try:
- os.unlink(torrent_file)
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
- ignore_output_for_now = subprocess.check_output( # pylint: disable=W0612
- (cfg.mktorrent_prog,
- "-a", cfg.tracker_url,
- "-c", "RPKI unauthenticated data snapshot generated by rpki-torrent",
- "-o", torrent_file,
- torrent_dir))
-
- syslog.syslog("Generating manifest")
- manifest = create_manifest(download_dir, z.torrent_name)
-
- syslog.syslog("Loading %s with unlimited seeding" % torrent_file)
- f = open(torrent_file, "rb")
- client.add(base64.b64encode(f.read()))
- f.close()
- client.unlimited_seeding(z.torrent_name)
-
- syslog.syslog("Creating upload connection")
- ssh = paramiko.Transport((cfg.sftp_host, cfg.sftp_port))
- try:
- hostkeys = paramiko.util.load_host_keys(cfg.sftp_hostkey_file)[cfg.sftp_host]["ssh-rsa"]
- except ConfigParser.Error:
- hostkeys = None
- ssh.connect(
- username = cfg.sftp_user,
- hostkey = hostkeys,
- pkey = paramiko.RSAKey.from_private_key_file(cfg.sftp_private_key_file))
- sftp = SFTPClient.from_transport(ssh)
-
- zip_filename = os.path.join("data", os.path.basename(z.filename))
- zip_tempname = zip_filename + ".new"
-
- syslog.syslog("Creating %s" % zip_tempname)
- f = sftp.open(zip_tempname, "wb")
- z.set_output_stream(f)
-
- syslog.syslog("Writing %s to zip" % torrent_file)
- z.write(
- torrent_file,
- arcname = os.path.basename(torrent_file),
- compress_type = zipfile.ZIP_DEFLATED)
-
- manifest_name = z.torrent_name + ".manifest"
-
- syslog.syslog("Writing %s to zip" % manifest_name)
- zi = zipfile.ZipInfo(manifest_name, time.gmtime()[:6])
- zi.external_attr = (stat.S_IFREG | 0644) << 16
- zi.internal_attr = 1 # Text, not binary
- z.writestr(zi,
- "".join("%s %s\n" % (v, k) for k, v in manifest.iteritems()),
- zipfile.ZIP_DEFLATED)
-
- syslog.syslog("Closing %s and renaming to %s" % (zip_tempname, zip_filename))
- z.close()
- f.close()
- sftp.atomic_rename(zip_tempname, zip_filename)
-
- syslog.syslog("Closing upload connection")
- ssh.close()
-
-def mirror_main():
- client = TransmissionClient()
- torrent_names = []
-
- for zip_url in cfg.zip_urls:
- if zip_url != cfg.generate_url:
- z = ZipFile(url = zip_url, dn = cfg.zip_dir, ta = cfg.zip_ta)
- if z.fetch():
- client.remove_torrents(z.torrent_name)
- syslog.syslog("Mirroring torrent %s" % z.torrent_name)
- client.add(z.get_torrent())
- torrent_names.append(z.torrent_name)
-
- if torrent_names:
- client.unlimited_seeding(*torrent_names)
+ import paramiko
+ class SFTPClient(paramiko.SFTPClient):
+ def atomic_rename(self, oldpath, newpath):
+ oldpath = self._adjust_cwd(oldpath)
+ newpath = self._adjust_cwd(newpath)
+ self._log(paramiko.common.DEBUG, 'atomic_rename(%r, %r)' % (oldpath, newpath))
+ self._request(paramiko.sftp.CMD_EXTENDED, "posix-rename@openssh.com", oldpath, newpath)
-def poll_main():
- for zip_url in cfg.zip_urls:
-
- z = ZipFile(url = zip_url, dn = cfg.zip_dir, ta = cfg.zip_ta)
+ z = ZipFile(url = cfg.generate_url, dn = cfg.zip_dir)
client = TransmissionClient()
- if z.fetch():
- client.remove_torrents(z.torrent_name)
- syslog.syslog("Adding torrent %s" % z.torrent_name)
- client.add(z.get_torrent())
-
- elif cfg.run_rcynic_anyway:
- run_rcynic(client, z)
-
-
-def torrent_completion_main():
- torrent_name = os.getenv("TR_TORRENT_NAME")
- torrent_id = int(os.getenv("TR_TORRENT_ID"))
-
- z = ZipFile(url = cfg.find_url(torrent_name), dn = cfg.zip_dir, ta = cfg.zip_ta)
- client = TransmissionClient()
- torrent = client.info([torrent_id]).popitem()[1]
+ client.remove_torrents(z.torrent_name)
- if torrent.name != torrent_name:
- raise InconsistentEnvironment("Torrent name %s does not match ID %d" % (torrent_name, torrent_id))
+ download_dir = client.get_session().download_dir
+ torrent_dir = os.path.join(download_dir, z.torrent_name)
+ torrent_file = os.path.join(cfg.zip_dir, z.torrent_name + ".torrent")
- if z.torrent_name != torrent_name:
- raise InconsistentEnvironment("Torrent name %s does not match torrent name in zip file %s" % (torrent_name, z.torrent_name))
- if torrent is None or torrent.progress != 100:
- raise TorrentNotReady("Torrent %s not ready for checking, how did I get here?" % torrent_name)
+ syslog.syslog("Synchronizing local data from %s to %s" % (cfg.unauthenticated, torrent_dir))
+ subprocess.check_call((cfg.rsync_prog, "--archive", "--delete",
+ os.path.normpath(cfg.unauthenticated) + "/",
+ os.path.normpath(torrent_dir) + "/"))
- log_email("Download complete %s" % z.url)
-
- run_rcynic(client, z)
-
-
-def run_rcynic(client, z):
- """
- Run rcynic and any post-processing we might want.
- """
-
- if cfg.lockfile is not None:
- syslog.syslog("Acquiring lock %s" % cfg.lockfile)
- lock = os.open(cfg.lockfile, os.O_WRONLY | os.O_CREAT, 0600)
- fcntl.flock(lock, fcntl.LOCK_EX)
- else:
- lock = None
-
- syslog.syslog("Checking manifest against disk")
-
- download_dir = client.get_session().download_dir
-
- manifest_from_disk = create_manifest(download_dir, z.torrent_name)
- manifest_from_zip = z.get_manifest()
+ syslog.syslog("Creating %s" % torrent_file)
+ try:
+ os.unlink(torrent_file)
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
+ ignore_output_for_now = subprocess.check_output( # pylint: disable=W0612
+ (cfg.mktorrent_prog,
+ "-a", cfg.tracker_url,
+ "-c", "RPKI unauthenticated data snapshot generated by rpki-torrent",
+ "-o", torrent_file,
+ torrent_dir))
+
+ syslog.syslog("Generating manifest")
+ manifest = create_manifest(download_dir, z.torrent_name)
+
+ syslog.syslog("Loading %s with unlimited seeding" % torrent_file)
+ f = open(torrent_file, "rb")
+ client.add(base64.b64encode(f.read()))
+ f.close()
+ client.unlimited_seeding(z.torrent_name)
- excess_files = set(manifest_from_disk) - set(manifest_from_zip)
- for fn in excess_files:
- del manifest_from_disk[fn]
+ syslog.syslog("Creating upload connection")
+ ssh = paramiko.Transport((cfg.sftp_host, cfg.sftp_port))
+ try:
+ hostkeys = paramiko.util.load_host_keys(cfg.sftp_hostkey_file)[cfg.sftp_host]["ssh-rsa"]
+ except ConfigParser.Error:
+ hostkeys = None
+ ssh.connect(
+ username = cfg.sftp_user,
+ hostkey = hostkeys,
+ pkey = paramiko.RSAKey.from_private_key_file(cfg.sftp_private_key_file))
+ sftp = SFTPClient.from_transport(ssh)
+
+ zip_filename = os.path.join("data", os.path.basename(z.filename))
+ zip_tempname = zip_filename + ".new"
+
+ syslog.syslog("Creating %s" % zip_tempname)
+ f = sftp.open(zip_tempname, "wb")
+ z.set_output_stream(f)
+
+ syslog.syslog("Writing %s to zip" % torrent_file)
+ z.write(
+ torrent_file,
+ arcname = os.path.basename(torrent_file),
+ compress_type = zipfile.ZIP_DEFLATED)
+
+ manifest_name = z.torrent_name + ".manifest"
+
+ syslog.syslog("Writing %s to zip" % manifest_name)
+ zi = zipfile.ZipInfo(manifest_name, time.gmtime()[:6])
+ zi.external_attr = (stat.S_IFREG | 0644) << 16
+ zi.internal_attr = 1 # Text, not binary
+ z.writestr(zi,
+ "".join("%s %s\n" % (v, k) for k, v in manifest.iteritems()),
+ zipfile.ZIP_DEFLATED)
+
+ syslog.syslog("Closing %s and renaming to %s" % (zip_tempname, zip_filename))
+ z.close()
+ f.close()
+ sftp.atomic_rename(zip_tempname, zip_filename)
- if manifest_from_disk != manifest_from_zip:
- raise TorrentDoesNotMatchManifest("Manifest for torrent %s does not match what we got" %
- z.torrent_name)
+ syslog.syslog("Closing upload connection")
+ ssh.close()
- if excess_files:
- syslog.syslog("Cleaning up excess files")
- for fn in excess_files:
- os.unlink(os.path.join(download_dir, fn))
+def mirror_main():
+ client = TransmissionClient()
+ torrent_names = []
- syslog.syslog("Running rcynic")
- log_email("Starting rcynic %s" % z.url)
- subprocess.check_call((cfg.rcynic_prog,
- "-c", cfg.rcynic_conf,
- "-u", os.path.join(client.get_session().download_dir, z.torrent_name)))
- log_email("Completed rcynic %s" % z.url)
+ for zip_url in cfg.zip_urls:
+ if zip_url != cfg.generate_url:
+ z = ZipFile(url = zip_url, dn = cfg.zip_dir, ta = cfg.zip_ta)
+ if z.fetch():
+ client.remove_torrents(z.torrent_name)
+ syslog.syslog("Mirroring torrent %s" % z.torrent_name)
+ client.add(z.get_torrent())
+ torrent_names.append(z.torrent_name)
- for cmd in cfg.post_rcynic_commands:
- syslog.syslog("Running post-rcynic command: %s" % cmd)
- subprocess.check_call(cmd, shell = True)
+ if torrent_names:
+ client.unlimited_seeding(*torrent_names)
- if lock is not None:
- syslog.syslog("Releasing lock %s" % cfg.lockfile)
- os.close(lock)
-# See http://www.minstrel.org.uk/papers/sftp/ for details on how to
-# set up safe upload-only SFTP directories on the server. In
-# particular http://www.minstrel.org.uk/papers/sftp/builtin/ is likely
-# to be the right path.
+def poll_main():
+ for zip_url in cfg.zip_urls:
+ z = ZipFile(url = zip_url, dn = cfg.zip_dir, ta = cfg.zip_ta)
+ client = TransmissionClient()
-class ZipFile(object):
- """
- Augmented version of standard python zipfile.ZipFile class, with
- some extra methods and specialized capabilities.
-
- All methods of the standard zipfile.ZipFile class are supported, but
- the constructor arguments are different, and opening the zip file
- itself is deferred until a call which requires this, since the file
- may first need to be fetched via HTTPS.
- """
-
- def __init__(self, url, dn, ta = None, verbose = True):
- self.url = url
- self.dir = dn
- self.ta = ta
- self.verbose = verbose
- self.filename = os.path.join(dn, os.path.basename(url))
- self.changed = False
- self.zf = None
- self.peercert = None
- self.torrent_name, zip_ext = os.path.splitext(os.path.basename(url))
- if zip_ext != ".zip":
- raise BadFormat
-
-
- def __getattr__(self, name):
- if self.zf is None:
- self.zf = zipfile.ZipFile(self.filename)
- return getattr(self.zf, name)
-
-
- def build_opener(self):
- """
- Voodoo to create a urllib2.OpenerDirector object with TLS
- certificate checking enabled and a hook to set self.peercert so
- our caller can check the subjectAltName field.
+ if z.fetch():
+ client.remove_torrents(z.torrent_name)
+ syslog.syslog("Adding torrent %s" % z.torrent_name)
+ client.add(z.get_torrent())
- You probably don't want to look at this if you can avoid it.
- """
+ elif cfg.run_rcynic_anyway:
+ run_rcynic(client, z)
- assert self.ta is not None
- # Yes, we're constructing one-off classes. Look away, look away.
+def torrent_completion_main():
+ torrent_name = os.getenv("TR_TORRENT_NAME")
+ torrent_id = int(os.getenv("TR_TORRENT_ID"))
- class HTTPSConnection(httplib.HTTPSConnection):
- zip = self
- def connect(self):
- sock = socket.create_connection((self.host, self.port), self.timeout)
- if getattr(self, "_tunnel_host", None):
- self.sock = sock
- self._tunnel()
- self.sock = ssl.wrap_socket(sock,
- keyfile = self.key_file,
- certfile = self.cert_file,
- cert_reqs = ssl.CERT_REQUIRED,
- ssl_version = ssl.PROTOCOL_TLSv1,
- ca_certs = self.zip.ta)
- self.zip.peercert = self.sock.getpeercert()
+ z = ZipFile(url = cfg.find_url(torrent_name), dn = cfg.zip_dir, ta = cfg.zip_ta)
+ client = TransmissionClient()
+ torrent = client.info([torrent_id]).popitem()[1]
- class HTTPSHandler(urllib2.HTTPSHandler):
- def https_open(self, req):
- return self.do_open(HTTPSConnection, req)
+ if torrent.name != torrent_name:
+ raise InconsistentEnvironment("Torrent name %s does not match ID %d" % (torrent_name, torrent_id))
- return urllib2.build_opener(HTTPSHandler)
+ if z.torrent_name != torrent_name:
+ raise InconsistentEnvironment("Torrent name %s does not match torrent name in zip file %s" % (torrent_name, z.torrent_name))
+ if torrent is None or torrent.progress != 100:
+ raise TorrentNotReady("Torrent %s not ready for checking, how did I get here?" % torrent_name)
- def check_subjectAltNames(self):
- """
- Check self.peercert against URL to make sure we were talking to
- the right HTTPS server.
- """
+ log_email("Download complete %s" % z.url)
- hostname = urlparse.urlparse(self.url).hostname
- subjectAltNames = set(i[1]
- for i in self.peercert.get("subjectAltName", ())
- if i[0] == "DNS")
- if hostname not in subjectAltNames:
- raise WrongServer
+ run_rcynic(client, z)
- def download_file(self, r, bufsize = 4096):
+def run_rcynic(client, z):
"""
- Downloaded file to disk.
+ Run rcynic and any post-processing we might want.
"""
- tempname = self.filename + ".new"
- f = open(tempname, "wb")
- n = int(r.info()["Content-Length"])
- for i in xrange(0, n - bufsize, bufsize): # pylint: disable=W0612
- f.write(r.read(bufsize))
- f.write(r.read())
- f.close()
- mtime = email.utils.mktime_tz(email.utils.parsedate_tz(r.info()["Last-Modified"]))
- os.utime(tempname, (mtime, mtime))
- os.rename(tempname, self.filename)
+ if cfg.lockfile is not None:
+ syslog.syslog("Acquiring lock %s" % cfg.lockfile)
+ lock = os.open(cfg.lockfile, os.O_WRONLY | os.O_CREAT, 0600)
+ fcntl.flock(lock, fcntl.LOCK_EX)
+ else:
+ lock = None
+ syslog.syslog("Checking manifest against disk")
- def set_output_stream(self, stream):
- """
- Set up this zip file for writing to a network stream.
- """
+ download_dir = client.get_session().download_dir
- assert self.zf is None
- self.zf = zipfile.ZipFile(stream, "w")
+ manifest_from_disk = create_manifest(download_dir, z.torrent_name)
+ manifest_from_zip = z.get_manifest()
+ excess_files = set(manifest_from_disk) - set(manifest_from_zip)
+ for fn in excess_files:
+ del manifest_from_disk[fn]
- def fetch(self):
- """
- Fetch zip file from URL given to constructor.
- """
+ if manifest_from_disk != manifest_from_zip:
+ raise TorrentDoesNotMatchManifest("Manifest for torrent %s does not match what we got" %
+ z.torrent_name)
- headers = { "User-Agent" : "rpki-torrent" }
- try:
- headers["If-Modified-Since"] = email.utils.formatdate(
- os.path.getmtime(self.filename), False, True)
- except OSError:
- pass
+ if excess_files:
+ syslog.syslog("Cleaning up excess files")
+ for fn in excess_files:
+ os.unlink(os.path.join(download_dir, fn))
- syslog.syslog("Checking %s..." % self.url)
- try:
- r = self.build_opener().open(urllib2.Request(self.url, None, headers))
- syslog.syslog("%s has changed, starting download" % self.url)
- self.changed = True
- log_email("Downloading %s" % self.url)
- except urllib2.HTTPError, e:
- if e.code == 304:
- syslog.syslog("%s has not changed" % self.url)
- elif e.code == 404:
- syslog.syslog("%s does not exist" % self.url)
- else:
- raise
- r = None
-
- self.check_subjectAltNames()
+ syslog.syslog("Running rcynic")
+ log_email("Starting rcynic %s" % z.url)
+ subprocess.check_call((cfg.rcynic_prog,
+ "-c", cfg.rcynic_conf,
+ "-u", os.path.join(client.get_session().download_dir, z.torrent_name)))
+ log_email("Completed rcynic %s" % z.url)
- if r is not None and r.geturl() != self.url:
- raise UnexpectedRedirect
+ for cmd in cfg.post_rcynic_commands:
+ syslog.syslog("Running post-rcynic command: %s" % cmd)
+ subprocess.check_call(cmd, shell = True)
- if r is not None:
- self.download_file(r)
- r.close()
+ if lock is not None:
+ syslog.syslog("Releasing lock %s" % cfg.lockfile)
+ os.close(lock)
- return self.changed
+# See http://www.minstrel.org.uk/papers/sftp/ for details on how to
+# set up safe upload-only SFTP directories on the server. In
+# particular http://www.minstrel.org.uk/papers/sftp/builtin/ is likely
+# to be the right path.
- def check_format(self):
- """
- Make sure that format of zip file matches our preconceptions: it
- should contain two files, one of which is the .torrent file, the
- other is the manifest, with names derived from the torrent name
- inferred from the URL.
+class ZipFile(object):
"""
+ Augmented version of standard python zipfile.ZipFile class, with
+ some extra methods and specialized capabilities.
- if set(self.namelist()) != set((self.torrent_name + ".torrent", self.torrent_name + ".manifest")):
- raise BadFormat
-
-
- def get_torrent(self):
- """
- Extract torrent file from zip file, encoded in Base64 because
- that's what the transmisionrpc library says it wants.
+ All methods of the standard zipfile.ZipFile class are supported, but
+ the constructor arguments are different, and opening the zip file
+ itself is deferred until a call which requires this, since the file
+ may first need to be fetched via HTTPS.
"""
- self.check_format()
- return base64.b64encode(self.read(self.torrent_name + ".torrent"))
+ def __init__(self, url, dn, ta = None, verbose = True):
+ self.url = url
+ self.dir = dn
+ self.ta = ta
+ self.verbose = verbose
+ self.filename = os.path.join(dn, os.path.basename(url))
+ self.changed = False
+ self.zf = None
+ self.peercert = None
+ self.torrent_name, zip_ext = os.path.splitext(os.path.basename(url))
+ if zip_ext != ".zip":
+ raise BadFormat
+
+
+ def __getattr__(self, name):
+ if self.zf is None:
+ self.zf = zipfile.ZipFile(self.filename)
+ return getattr(self.zf, name)
+
+
+ def build_opener(self):
+ """
+ Voodoo to create a urllib2.OpenerDirector object with TLS
+ certificate checking enabled and a hook to set self.peercert so
+ our caller can check the subjectAltName field.
+
+ You probably don't want to look at this if you can avoid it.
+ """
+
+ assert self.ta is not None
+
+ # Yes, we're constructing one-off classes. Look away, look away.
+
+ class HTTPSConnection(httplib.HTTPSConnection):
+ zip = self
+ def connect(self):
+ sock = socket.create_connection((self.host, self.port), self.timeout)
+ if getattr(self, "_tunnel_host", None):
+ self.sock = sock
+ self._tunnel()
+ self.sock = ssl.wrap_socket(sock,
+ keyfile = self.key_file,
+ certfile = self.cert_file,
+ cert_reqs = ssl.CERT_REQUIRED,
+ ssl_version = ssl.PROTOCOL_TLSv1,
+ ca_certs = self.zip.ta)
+ self.zip.peercert = self.sock.getpeercert()
+
+ class HTTPSHandler(urllib2.HTTPSHandler):
+ def https_open(self, req):
+ return self.do_open(HTTPSConnection, req)
+
+ return urllib2.build_opener(HTTPSHandler)
+
+
+ def check_subjectAltNames(self):
+ """
+ Check self.peercert against URL to make sure we were talking to
+ the right HTTPS server.
+ """
+
+ hostname = urlparse.urlparse(self.url).hostname
+ subjectAltNames = set(i[1]
+ for i in self.peercert.get("subjectAltName", ())
+ if i[0] == "DNS")
+ if hostname not in subjectAltNames:
+ raise WrongServer
+
+
+ def download_file(self, r, bufsize = 4096):
+ """
+ Downloaded file to disk.
+ """
+
+ tempname = self.filename + ".new"
+ f = open(tempname, "wb")
+ n = int(r.info()["Content-Length"])
+ for i in xrange(0, n - bufsize, bufsize): # pylint: disable=W0612
+ f.write(r.read(bufsize))
+ f.write(r.read())
+ f.close()
+ mtime = email.utils.mktime_tz(email.utils.parsedate_tz(r.info()["Last-Modified"]))
+ os.utime(tempname, (mtime, mtime))
+ os.rename(tempname, self.filename)
+
+
+ def set_output_stream(self, stream):
+ """
+ Set up this zip file for writing to a network stream.
+ """
+
+ assert self.zf is None
+ self.zf = zipfile.ZipFile(stream, "w")
+
+
+ def fetch(self):
+ """
+ Fetch zip file from URL given to constructor.
+ """
+
+ headers = { "User-Agent" : "rpki-torrent" }
+ try:
+ headers["If-Modified-Since"] = email.utils.formatdate(
+ os.path.getmtime(self.filename), False, True)
+ except OSError:
+ pass
+
+ syslog.syslog("Checking %s..." % self.url)
+ try:
+ r = self.build_opener().open(urllib2.Request(self.url, None, headers))
+ syslog.syslog("%s has changed, starting download" % self.url)
+ self.changed = True
+ log_email("Downloading %s" % self.url)
+ except urllib2.HTTPError, e:
+ if e.code == 304:
+ syslog.syslog("%s has not changed" % self.url)
+ elif e.code == 404:
+ syslog.syslog("%s does not exist" % self.url)
+ else:
+ raise
+ r = None
+
+ self.check_subjectAltNames()
+
+ if r is not None and r.geturl() != self.url:
+ raise UnexpectedRedirect
+
+ if r is not None:
+ self.download_file(r)
+ r.close()
+
+ return self.changed
+
+
+ def check_format(self):
+ """
+ Make sure that format of zip file matches our preconceptions: it
+ should contain two files, one of which is the .torrent file, the
+ other is the manifest, with names derived from the torrent name
+ inferred from the URL.
+ """
+
+ if set(self.namelist()) != set((self.torrent_name + ".torrent", self.torrent_name + ".manifest")):
+ raise BadFormat
+
+
+ def get_torrent(self):
+ """
+ Extract torrent file from zip file, encoded in Base64 because
+ that's what the transmisionrpc library says it wants.
+ """
+
+ self.check_format()
+ return base64.b64encode(self.read(self.torrent_name + ".torrent"))
+
+
+ def get_manifest(self):
+ """
+ Extract manifest from zip file, as a dictionary.
+
+ For the moment we're fixing up the internal file names from the
+ format that the existing shell-script prototype uses, but this
+ should go away once this program both generates and checks the
+ manifests.
+ """
+
+ self.check_format()
+ result = {}
+ for line in self.open(self.torrent_name + ".manifest"):
+ h, fn = line.split()
+ #
+ # Fixup for earlier manifest format, this should go away
+ if not fn.startswith(self.torrent_name):
+ fn = os.path.normpath(os.path.join(self.torrent_name, fn))
+ #
+ result[fn] = h
+ return result
- def get_manifest(self):
+def create_manifest(topdir, torrent_name):
"""
- Extract manifest from zip file, as a dictionary.
-
- For the moment we're fixing up the internal file names from the
- format that the existing shell-script prototype uses, but this
- should go away once this program both generates and checks the
- manifests.
+ Generate a manifest, expressed as a dictionary.
"""
- self.check_format()
result = {}
- for line in self.open(self.torrent_name + ".manifest"):
- h, fn = line.split()
- #
- # Fixup for earlier manifest format, this should go away
- if not fn.startswith(self.torrent_name):
- fn = os.path.normpath(os.path.join(self.torrent_name, fn))
- #
- result[fn] = h
+ topdir = os.path.abspath(topdir)
+ for dirpath, dirnames, filenames in os.walk(os.path.join(topdir, torrent_name)): # pylint: disable=W0612
+ for filename in filenames:
+ filename = os.path.join(dirpath, filename)
+ f = open(filename, "rb")
+ result[os.path.relpath(filename, topdir)] = hashlib.sha256(f.read()).hexdigest()
+ f.close()
return result
-def create_manifest(topdir, torrent_name):
- """
- Generate a manifest, expressed as a dictionary.
- """
-
- result = {}
- topdir = os.path.abspath(topdir)
- for dirpath, dirnames, filenames in os.walk(os.path.join(topdir, torrent_name)): # pylint: disable=W0612
- for filename in filenames:
- filename = os.path.join(dirpath, filename)
- f = open(filename, "rb")
- result[os.path.relpath(filename, topdir)] = hashlib.sha256(f.read()).hexdigest()
- f.close()
- return result
-
-
def log_email(msg, subj = None):
- try:
- if not msg.endswith("\n"):
- msg += "\n"
- if subj is None:
- subj = msg.partition("\n")[0]
- m = email.mime.text.MIMEText(msg)
- m["Date"] = time.strftime("%d %b %Y %H:%M:%S +0000", time.gmtime())
- m["From"] = cfg.log_email
- m["To"] = cfg.log_email
- m["Subject"] = subj
- s = smtplib.SMTP("localhost")
- s.sendmail(cfg.log_email, [cfg.log_email], m.as_string())
- s.quit()
- except ConfigParser.Error:
- pass
+ try:
+ if not msg.endswith("\n"):
+ msg += "\n"
+ if subj is None:
+ subj = msg.partition("\n")[0]
+ m = email.mime.text.MIMEText(msg)
+ m["Date"] = time.strftime("%d %b %Y %H:%M:%S +0000", time.gmtime())
+ m["From"] = cfg.log_email
+ m["To"] = cfg.log_email
+ m["Subject"] = subj
+ s = smtplib.SMTP("localhost")
+ s.sendmail(cfg.log_email, [cfg.log_email], m.as_string())
+ s.quit()
+ except ConfigParser.Error:
+ pass
class TransmissionClient(transmissionrpc.client.Client):
- """
- Extension of transmissionrpc.client.Client.
- """
-
- def __init__(self, **kwargs):
- kwargs.setdefault("address", "127.0.0.1")
- kwargs.setdefault("user", cfg.transmission_username)
- kwargs.setdefault("password", cfg.transmission_password)
- transmissionrpc.client.Client.__init__(self, **kwargs)
-
-
- def find_torrents(self, *names):
- """
- Find torrents with given name(s), return id(s).
- """
-
- result = [i for i, t in self.list().iteritems() if t.name in names]
- if not result:
- raise CouldNotFindTorrents
- return result
-
-
- def remove_torrents(self, *names):
"""
- Remove any torrents with the given name(s).
+ Extension of transmissionrpc.client.Client.
"""
- try:
- ids = self.find_torrents(*names)
- except CouldNotFindTorrents:
- pass
- else:
- syslog.syslog("Removing torrent%s %s (%s)" % (
- "" if len(ids) == 1 else "s",
- ", ".join(names),
- ", ".join("#%s" % i for i in ids)))
- self.remove(ids)
+ def __init__(self, **kwargs):
+ kwargs.setdefault("address", "127.0.0.1")
+ kwargs.setdefault("user", cfg.transmission_username)
+ kwargs.setdefault("password", cfg.transmission_password)
+ transmissionrpc.client.Client.__init__(self, **kwargs)
- def unlimited_seeding(self, *names):
- """
- Set unlimited seeding for specified torrents.
- """
- # Apparently seedRatioMode = 2 means "no limit"
- try:
- self.change(self.find_torrents(*names), seedRatioMode = 2)
- except CouldNotFindTorrents:
- syslog.syslog("Couldn't tweak seedRatioMode, blundering onwards")
+ def find_torrents(self, *names):
+ """
+ Find torrents with given name(s), return id(s).
+ """
+ result = [i for i, t in self.list().iteritems() if t.name in names]
+ if not result:
+ raise CouldNotFindTorrents
+ return result
-class MyConfigParser(ConfigParser.RawConfigParser):
- rpki_torrent_section = "rpki-torrent"
+ def remove_torrents(self, *names):
+ """
+ Remove any torrents with the given name(s).
+ """
- @property
- def zip_dir(self):
- return self.get(self.rpki_torrent_section, "zip_dir")
+ try:
+ ids = self.find_torrents(*names)
+ except CouldNotFindTorrents:
+ pass
+ else:
+ syslog.syslog("Removing torrent%s %s (%s)" % (
+ "" if len(ids) == 1 else "s",
+ ", ".join(names),
+ ", ".join("#%s" % i for i in ids)))
+ self.remove(ids)
- @property
- def zip_ta(self):
- return self.get(self.rpki_torrent_section, "zip_ta")
+ def unlimited_seeding(self, *names):
+ """
+ Set unlimited seeding for specified torrents.
+ """
- @property
- def rcynic_prog(self):
- return self.get(self.rpki_torrent_section, "rcynic_prog")
+ # Apparently seedRatioMode = 2 means "no limit"
+ try:
+ self.change(self.find_torrents(*names), seedRatioMode = 2)
+ except CouldNotFindTorrents:
+ syslog.syslog("Couldn't tweak seedRatioMode, blundering onwards")
- @property
- def rcynic_conf(self):
- return self.get(self.rpki_torrent_section, "rcynic_conf")
- @property
- def run_rcynic_anyway(self):
- return self.getboolean(self.rpki_torrent_section, "run_rcynic_anyway")
-
- @property
- def generate_url(self):
- return self.get(self.rpki_torrent_section, "generate_url")
-
- @property
- def act_as_generator(self):
- try:
- return self.get(self.rpki_torrent_section, "generate_url") != ""
- except ConfigParser.Error:
- return False
-
- @property
- def rsync_prog(self):
- return self.get(self.rpki_torrent_section, "rsync_prog")
-
- @property
- def mktorrent_prog(self):
- return self.get(self.rpki_torrent_section, "mktorrent_prog")
-
- @property
- def tracker_url(self):
- return self.get(self.rpki_torrent_section, "tracker_url")
-
- @property
- def sftp_host(self):
- return self.get(self.rpki_torrent_section, "sftp_host")
-
- @property
- def sftp_port(self):
- try:
- return self.getint(self.rpki_torrent_section, "sftp_port")
- except ConfigParser.Error:
- return 22
-
- @property
- def sftp_user(self):
- return self.get(self.rpki_torrent_section, "sftp_user")
-
- @property
- def sftp_hostkey_file(self):
- return self.get(self.rpki_torrent_section, "sftp_hostkey_file")
-
- @property
- def sftp_private_key_file(self):
- return self.get(self.rpki_torrent_section, "sftp_private_key_file")
-
- @property
- def lockfile(self):
- try:
- return self.get(self.rpki_torrent_section, "lockfile")
- except ConfigParser.Error:
- return None
-
- @property
- def unauthenticated(self):
- try:
- return self.get(self.rpki_torrent_section, "unauthenticated")
- except ConfigParser.Error:
- return self.get("rcynic", "unauthenticated")
-
- @property
- def log_email(self):
- return self.get(self.rpki_torrent_section, "log_email")
-
- @property
- def transmission_username(self):
- try:
- return self.get(self.rpki_torrent_section, "transmission_username")
- except ConfigParser.Error:
- return None
+class MyConfigParser(ConfigParser.RawConfigParser):
- @property
- def transmission_password(self):
- try:
- return self.get(self.rpki_torrent_section, "transmission_password")
- except ConfigParser.Error:
- return None
-
- def multioption_iter(self, name, getter = None):
- if getter is None:
- getter = self.get
- if self.has_option(self.rpki_torrent_section, name):
- yield getter(self.rpki_torrent_section, name)
- name += "."
- names = [i for i in self.options(self.rpki_torrent_section) if i.startswith(name) and i[len(name):].isdigit()]
- names.sort(key = lambda s: int(s[len(name):])) # pylint: disable=W0631
- for name in names:
- yield getter(self.rpki_torrent_section, name)
-
- @property
- def zip_urls(self):
- return self.multioption_iter("zip_url")
-
- @property
- def post_rcynic_commands(self):
- return self.multioption_iter("post_rcynic_command")
-
- def find_url(self, torrent_name):
- urls = [u for u in self.zip_urls
- if os.path.splitext(os.path.basename(u))[0] == torrent_name]
- if len(urls) != 1:
- raise TorrentNameDoesNotMatchURL("Can't find URL matching torrent name %s" % torrent_name)
- return urls[0]
+ rpki_torrent_section = "rpki-torrent"
+
+ @property
+ def zip_dir(self):
+ return self.get(self.rpki_torrent_section, "zip_dir")
+
+ @property
+ def zip_ta(self):
+ return self.get(self.rpki_torrent_section, "zip_ta")
+
+ @property
+ def rcynic_prog(self):
+ return self.get(self.rpki_torrent_section, "rcynic_prog")
+
+ @property
+ def rcynic_conf(self):
+ return self.get(self.rpki_torrent_section, "rcynic_conf")
+
+ @property
+ def run_rcynic_anyway(self):
+ return self.getboolean(self.rpki_torrent_section, "run_rcynic_anyway")
+
+ @property
+ def generate_url(self):
+ return self.get(self.rpki_torrent_section, "generate_url")
+
+ @property
+ def act_as_generator(self):
+ try:
+ return self.get(self.rpki_torrent_section, "generate_url") != ""
+ except ConfigParser.Error:
+ return False
+
+ @property
+ def rsync_prog(self):
+ return self.get(self.rpki_torrent_section, "rsync_prog")
+
+ @property
+ def mktorrent_prog(self):
+ return self.get(self.rpki_torrent_section, "mktorrent_prog")
+
+ @property
+ def tracker_url(self):
+ return self.get(self.rpki_torrent_section, "tracker_url")
+
+ @property
+ def sftp_host(self):
+ return self.get(self.rpki_torrent_section, "sftp_host")
+
+ @property
+ def sftp_port(self):
+ try:
+ return self.getint(self.rpki_torrent_section, "sftp_port")
+ except ConfigParser.Error:
+ return 22
+
+ @property
+ def sftp_user(self):
+ return self.get(self.rpki_torrent_section, "sftp_user")
+
+ @property
+ def sftp_hostkey_file(self):
+ return self.get(self.rpki_torrent_section, "sftp_hostkey_file")
+
+ @property
+ def sftp_private_key_file(self):
+ return self.get(self.rpki_torrent_section, "sftp_private_key_file")
+
+ @property
+ def lockfile(self):
+ try:
+ return self.get(self.rpki_torrent_section, "lockfile")
+ except ConfigParser.Error:
+ return None
+
+ @property
+ def unauthenticated(self):
+ try:
+ return self.get(self.rpki_torrent_section, "unauthenticated")
+ except ConfigParser.Error:
+ return self.get("rcynic", "unauthenticated")
+
+ @property
+ def log_email(self):
+ return self.get(self.rpki_torrent_section, "log_email")
+
+ @property
+ def transmission_username(self):
+ try:
+ return self.get(self.rpki_torrent_section, "transmission_username")
+ except ConfigParser.Error:
+ return None
+
+ @property
+ def transmission_password(self):
+ try:
+ return self.get(self.rpki_torrent_section, "transmission_password")
+ except ConfigParser.Error:
+ return None
+
+ def multioption_iter(self, name, getter = None):
+ if getter is None:
+ getter = self.get
+ if self.has_option(self.rpki_torrent_section, name):
+ yield getter(self.rpki_torrent_section, name)
+ name += "."
+ names = [i for i in self.options(self.rpki_torrent_section) if i.startswith(name) and i[len(name):].isdigit()]
+ names.sort(key = lambda s: int(s[len(name):])) # pylint: disable=W0631
+ for name in names:
+ yield getter(self.rpki_torrent_section, name)
+
+ @property
+ def zip_urls(self):
+ return self.multioption_iter("zip_url")
+
+ @property
+ def post_rcynic_commands(self):
+ return self.multioption_iter("post_rcynic_command")
+
+ def find_url(self, torrent_name):
+ urls = [u for u in self.zip_urls
+ if os.path.splitext(os.path.basename(u))[0] == torrent_name]
+ if len(urls) != 1:
+ raise TorrentNameDoesNotMatchURL("Can't find URL matching torrent name %s" % torrent_name)
+ return urls[0]
if __name__ == "__main__":
- main()
+ main()
diff --git a/rp/rcynic/validation_status b/rp/rcynic/validation_status
index a3ee36f1..f961f473 100755
--- a/rp/rcynic/validation_status
+++ b/rp/rcynic/validation_status
@@ -23,14 +23,14 @@ Flat text listing of <validation_status/> elements from rcynic.xml.
import sys
try:
- from lxml.etree import ElementTree
+ from lxml.etree import ElementTree
except ImportError:
- from xml.etree.ElementTree import ElementTree
+ from xml.etree.ElementTree import ElementTree
for filename in ([sys.stdin] if len(sys.argv) < 2 else sys.argv[1:]):
- for elt in ElementTree(file = filename).findall("validation_status"):
- print "%s %8s %-40s %s" % (
- elt.get("timestamp"),
- elt.get("generation"),
- elt.get("status"),
- elt.text.strip())
+ for elt in ElementTree(file = filename).findall("validation_status"):
+ print "%s %8s %-40s %s" % (
+ elt.get("timestamp"),
+ elt.get("generation"),
+ elt.get("status"),
+ elt.text.strip())
diff --git a/rp/rpki-rtr/rpki-rtr b/rp/rpki-rtr/rpki-rtr
index 5ad4cf26..7f3e6b4f 100755
--- a/rp/rpki-rtr/rpki-rtr
+++ b/rp/rpki-rtr/rpki-rtr
@@ -19,5 +19,5 @@
# PERFORMANCE OF THIS SOFTWARE.
if __name__ == "__main__":
- from rpki.rtr.main import main
- main()
+ from rpki.rtr.main import main
+ main()
diff --git a/rp/utils/find_roa b/rp/utils/find_roa
index 2b537bf4..15a2f25f 100755
--- a/rp/utils/find_roa
+++ b/rp/utils/find_roa
@@ -28,109 +28,109 @@ import rpki.oids
def check_dir(s):
- if os.path.isdir(s):
- return os.path.abspath(s)
- else:
- raise argparse.ArgumentTypeError("%r is not a directory" % s)
+ if os.path.isdir(s):
+ return os.path.abspath(s)
+ else:
+ raise argparse.ArgumentTypeError("%r is not a directory" % s)
def filename_to_uri(filename):
- if not filename.startswith(args.rcynic_dir):
- raise ValueError
- return "rsync://" + filename[len(args.rcynic_dir):].lstrip("/")
+ if not filename.startswith(args.rcynic_dir):
+ raise ValueError
+ return "rsync://" + filename[len(args.rcynic_dir):].lstrip("/")
def uri_to_filename(uri):
- if not uri.startswith("rsync://"):
- raise ValueError
- return os.path.join(args.rcynic_dir, uri[len("rsync://"):])
+ if not uri.startswith("rsync://"):
+ raise ValueError
+ return os.path.join(args.rcynic_dir, uri[len("rsync://"):])
class Prefix(object):
- """
- One prefix parsed from the command line.
- """
-
- def __init__(self, val):
- addr, length = val.split("/")
- length, sep, maxlength = length.partition("-") # pylint: disable=W0612
- self.prefix = rpki.POW.IPAddress(addr)
- self.length = int(length)
- self.maxlength = int(maxlength) if maxlength else self.length
- if self.maxlength < self.length or self.length < 0 or self.length > self.prefix.bits:
- raise ValueError
- if self.prefix & ((1 << (self.prefix.bits - self.length)) - 1) != 0:
- raise ValueError
-
- def matches(self, roa): # pylint: disable=W0621
- return any(self.prefix == prefix and
- self.length == length and
- (not args.match_maxlength or
- self.maxlength == maxlength or
- (maxlength is None and
- self.length == self.maxlength))
- for prefix, length, maxlength in roa.prefixes)
+ """
+ One prefix parsed from the command line.
+ """
+
+ def __init__(self, val):
+ addr, length = val.split("/")
+ length, sep, maxlength = length.partition("-") # pylint: disable=W0612
+ self.prefix = rpki.POW.IPAddress(addr)
+ self.length = int(length)
+ self.maxlength = int(maxlength) if maxlength else self.length
+ if self.maxlength < self.length or self.length < 0 or self.length > self.prefix.bits:
+ raise ValueError
+ if self.prefix & ((1 << (self.prefix.bits - self.length)) - 1) != 0:
+ raise ValueError
+
+ def matches(self, roa): # pylint: disable=W0621
+ return any(self.prefix == prefix and
+ self.length == length and
+ (not args.match_maxlength or
+ self.maxlength == maxlength or
+ (maxlength is None and
+ self.length == self.maxlength))
+ for prefix, length, maxlength in roa.prefixes)
class ROA(rpki.POW.ROA): # pylint: disable=W0232
- """
- Aspects of a ROA that we care about.
- """
-
- @classmethod
- def parse(cls, fn): # pylint: disable=W0621
- assert fn.startswith(args.rcynic_dir)
- self = cls.derReadFile(fn)
- self.fn = fn
- self.extractWithoutVerifying()
- v4, v6 = self.getPrefixes()
- self.prefixes = (v4 or ()) + (v6 or ())
- return self
-
- @property
- def uri(self):
- return filename_to_uri(self.fn)
-
- @property
- def formatted_prefixes(self):
- for prefix in self.prefixes:
- if prefix[2] is None or prefix[1] == prefix[2]:
- yield "%s/%d" % (prefix[0], prefix[1])
- else:
- yield "%s/%d-%d" % (prefix[0], prefix[1], prefix[2])
-
- def __str__(self):
- prefixes = " ".join(self.formatted_prefixes)
- plural = "es" if " " in prefixes else ""
- if args.show_inception:
- return "signingTime %s ASN %s prefix%s %s" % (self.signingTime(), self.getASID(), plural, prefixes)
- else:
- return "ASN %s prefix%s %s" % (self.getASID(), plural, prefixes)
-
- def show(self):
- print "%s %s" % (self, self.fn if args.show_filenames else self.uri)
-
- def show_expiration(self):
- print self
- x = self.certs()[0]
- fn = self.fn # pylint: disable=W0621
- uri = self.uri
- while uri is not None:
- name = fn if args.show_filenames else uri
- if args.show_inception:
- print "notBefore", x.getNotBefore(), "notAfter", x.getNotAfter(), name
- else:
- print x.getNotAfter(), name
- for uri in x.getAIA() or ():
- if uri.startswith("rsync://"):
- break
- else:
- break
- fn = uri_to_filename(uri)
- if not os.path.exists(fn):
- print "***** MISSING ******", uri
- break
- x = rpki.POW.X509.derReadFile(fn)
- print
+ """
+ Aspects of a ROA that we care about.
+ """
+
+ @classmethod
+ def parse(cls, fn): # pylint: disable=W0621
+ assert fn.startswith(args.rcynic_dir)
+ self = cls.derReadFile(fn)
+ self.fn = fn
+ self.extractWithoutVerifying()
+ v4, v6 = self.getPrefixes()
+ self.prefixes = (v4 or ()) + (v6 or ())
+ return self
+
+ @property
+ def uri(self):
+ return filename_to_uri(self.fn)
+
+ @property
+ def formatted_prefixes(self):
+ for prefix in self.prefixes:
+ if prefix[2] is None or prefix[1] == prefix[2]:
+ yield "%s/%d" % (prefix[0], prefix[1])
+ else:
+ yield "%s/%d-%d" % (prefix[0], prefix[1], prefix[2])
+
+ def __str__(self):
+ prefixes = " ".join(self.formatted_prefixes)
+ plural = "es" if " " in prefixes else ""
+ if args.show_inception:
+ return "signingTime %s ASN %s prefix%s %s" % (self.signingTime(), self.getASID(), plural, prefixes)
+ else:
+ return "ASN %s prefix%s %s" % (self.getASID(), plural, prefixes)
+
+ def show(self):
+ print "%s %s" % (self, self.fn if args.show_filenames else self.uri)
+
+ def show_expiration(self):
+ print self
+ x = self.certs()[0]
+ fn = self.fn # pylint: disable=W0621
+ uri = self.uri
+ while uri is not None:
+ name = fn if args.show_filenames else uri
+ if args.show_inception:
+ print "notBefore", x.getNotBefore(), "notAfter", x.getNotAfter(), name
+ else:
+ print x.getNotAfter(), name
+ for uri in x.getAIA() or ():
+ if uri.startswith("rsync://"):
+ break
+ else:
+ break
+ fn = uri_to_filename(uri)
+ if not os.path.exists(fn):
+ print "***** MISSING ******", uri
+ break
+ x = rpki.POW.X509.derReadFile(fn)
+ print
parser = argparse.ArgumentParser(description = __doc__)
@@ -145,14 +145,14 @@ args = parser.parse_args()
# If there's some way to automate this in the parser, I don't know what it is, so just catch it here.
if args.all != (not args.prefixes):
- parser.error("--all and prefix list are mutually exclusive")
+ parser.error("--all and prefix list are mutually exclusive")
for root, dirs, files in os.walk(args.rcynic_dir):
- for fn in files:
- if fn.endswith(".roa"):
- roa = ROA.parse(os.path.join(root, fn))
- if args.all or any(prefix.matches(roa) for prefix in args.prefixes):
- if args.show_expiration:
- roa.show_expiration()
- else:
- roa.show()
+ for fn in files:
+ if fn.endswith(".roa"):
+ roa = ROA.parse(os.path.join(root, fn))
+ if args.all or any(prefix.matches(roa) for prefix in args.prefixes):
+ if args.show_expiration:
+ roa.show_expiration()
+ else:
+ roa.show()
diff --git a/rp/utils/hashdir b/rp/utils/hashdir
index d3fe393c..c7c18350 100755
--- a/rp/utils/hashdir
+++ b/rp/utils/hashdir
@@ -30,10 +30,10 @@ import argparse
import rpki.POW
def check_dir(s):
- if os.path.isdir(s):
- return os.path.abspath(s)
- else:
- raise argparse.ArgumentTypeError("%r is not a directory" % s)
+ if os.path.isdir(s):
+ return os.path.abspath(s)
+ else:
+ raise argparse.ArgumentTypeError("%r is not a directory" % s)
parser = argparse.ArgumentParser(description = __doc__)
parser.add_argument("-v", "--verbose", action = "store_true", help = "whistle while you work")
@@ -42,26 +42,26 @@ parser.add_argument("output_dir", help = "name of output directory to create")
args = parser.parse_args()
if not os.path.isdir(args.output_dir):
- os.makedirs(args.output_dir)
+ os.makedirs(args.output_dir)
for root, dirs, files in os.walk(args.rcynic_dir):
- for ifn in files:
- ifn = os.path.join(root, ifn)
- if ifn.endswith(".cer"):
- obj = rpki.POW.X509.derReadFile(ifn)
- fmt = "%08x.%%d" % obj.getSubjectHash()
- elif ifn.endswith(".crl"):
- obj = rpki.POW.CRL.derReadFile(ifn)
- fmt = "%08x.r%%d" % obj.getIssuerHash()
- else:
- continue
- for i in xrange(1000000):
- ofn = os.path.join(args.output_dir, fmt % i)
- if not os.path.exists(ofn):
- with open(ofn, "w") as f:
- f.write(obj.pemWrite())
- if args.verbose:
- print ofn, "<=", ifn
- break
- else:
- sys.exit("No path name available for %s (%s)" % (ifn, ofn))
+ for ifn in files:
+ ifn = os.path.join(root, ifn)
+ if ifn.endswith(".cer"):
+ obj = rpki.POW.X509.derReadFile(ifn)
+ fmt = "%08x.%%d" % obj.getSubjectHash()
+ elif ifn.endswith(".crl"):
+ obj = rpki.POW.CRL.derReadFile(ifn)
+ fmt = "%08x.r%%d" % obj.getIssuerHash()
+ else:
+ continue
+ for i in xrange(1000000):
+ ofn = os.path.join(args.output_dir, fmt % i)
+ if not os.path.exists(ofn):
+ with open(ofn, "w") as f:
+ f.write(obj.pemWrite())
+ if args.verbose:
+ print ofn, "<=", ifn
+ break
+ else:
+ sys.exit("No path name available for %s (%s)" % (ifn, ofn))
diff --git a/rp/utils/print_roa b/rp/utils/print_roa
index c96a7c66..78ae244f 100755
--- a/rp/utils/print_roa
+++ b/rp/utils/print_roa
@@ -26,18 +26,18 @@ import rpki.POW
class ROA(rpki.POW.ROA): # pylint: disable=W0232
- @staticmethod
- def _format_prefix(p):
- if p[2] in (None, p[1]):
- return "%s/%d" % (p[0], p[1])
- else:
- return "%s/%d-%d" % (p[0], p[1], p[2])
+ @staticmethod
+ def _format_prefix(p):
+ if p[2] in (None, p[1]):
+ return "%s/%d" % (p[0], p[1])
+ else:
+ return "%s/%d-%d" % (p[0], p[1], p[2])
- def parse(self):
- self.extractWithoutVerifying()
- v4, v6 = self.getPrefixes()
- self.v4_prefixes = [self._format_prefix(p) for p in (v4 or ())]
- self.v6_prefixes = [self._format_prefix(p) for p in (v6 or ())]
+ def parse(self):
+ self.extractWithoutVerifying()
+ v4, v6 = self.getPrefixes()
+ self.v4_prefixes = [self._format_prefix(p) for p in (v4 or ())]
+ self.v6_prefixes = [self._format_prefix(p) for p in (v6 or ())]
parser = argparse.ArgumentParser(description = __doc__)
parser.add_argument("-b", "--brief", action = "store_true", help = "show only ASN and prefix(es)")
@@ -47,27 +47,27 @@ parser.add_argument("roas", nargs = "+", type = ROA.derReadFile, help = "ROA(s)
args = parser.parse_args()
for roa in args.roas:
- roa.parse()
- if args.brief:
- if args.signing_time:
- print roa.signingTime(),
- print roa.getASID(), " ".join(roa.v4_prefixes + roa.v6_prefixes)
- else:
- print "ROA Version: ", roa.getVersion()
- print "SigningTime: ", roa.signingTime()
- print "asID: ", roa.getASID()
- if roa.v4_prefixes:
- print " addressFamily:", 1
- for prefix in roa.v4_prefixes:
- print " IPAddress:", prefix
- if roa.v6_prefixes:
- print " addressFamily:", 2
- for prefix in roa.v6_prefixes:
- print " IPAddress:", prefix
- if args.cms:
- print roa.pprint()
- for cer in roa.certs():
- print cer.pprint()
- for crl in roa.crls():
- print crl.pprint()
- print
+ roa.parse()
+ if args.brief:
+ if args.signing_time:
+ print roa.signingTime(),
+ print roa.getASID(), " ".join(roa.v4_prefixes + roa.v6_prefixes)
+ else:
+ print "ROA Version: ", roa.getVersion()
+ print "SigningTime: ", roa.signingTime()
+ print "asID: ", roa.getASID()
+ if roa.v4_prefixes:
+ print " addressFamily:", 1
+ for prefix in roa.v4_prefixes:
+ print " IPAddress:", prefix
+ if roa.v6_prefixes:
+ print " addressFamily:", 2
+ for prefix in roa.v6_prefixes:
+ print " IPAddress:", prefix
+ if args.cms:
+ print roa.pprint()
+ for cer in roa.certs():
+ print cer.pprint()
+ for crl in roa.crls():
+ print crl.pprint()
+ print
diff --git a/rp/utils/print_rpki_manifest b/rp/utils/print_rpki_manifest
index ce9b25ea..83e5c16e 100755
--- a/rp/utils/print_rpki_manifest
+++ b/rp/utils/print_rpki_manifest
@@ -31,20 +31,20 @@ parser.add_argument("manifests", nargs = "+", type = rpki.POW.Manifest.derReadFi
args = parser.parse_args()
for mft in args.manifests:
- mft.extractWithoutVerifying()
- print "Manifest Version:", mft.getVersion()
- print "SigningTime: ", mft.signingTime()
- print "Number: ", mft.getManifestNumber()
- print "thisUpdate: ", mft.getThisUpdate()
- print "nextUpdate: ", mft.getNextUpdate()
- print "fileHashAlg: ", rpki.oids.oid2name(mft.getAlgorithm())
- for i, fah in enumerate(mft.getFiles()):
- name, obj_hash = fah
- print "fileList[%3d]: %s %s" % (i, ":".join(("%02X" % ord(h) for h in obj_hash)), name)
- if args.cms:
- print mft.pprint()
- for cer in mft.certs():
- print cer.pprint()
- for crl in mft.crls():
- print crl.pprint()
- print
+ mft.extractWithoutVerifying()
+ print "Manifest Version:", mft.getVersion()
+ print "SigningTime: ", mft.signingTime()
+ print "Number: ", mft.getManifestNumber()
+ print "thisUpdate: ", mft.getThisUpdate()
+ print "nextUpdate: ", mft.getNextUpdate()
+ print "fileHashAlg: ", rpki.oids.oid2name(mft.getAlgorithm())
+ for i, fah in enumerate(mft.getFiles()):
+ name, obj_hash = fah
+ print "fileList[%3d]: %s %s" % (i, ":".join(("%02X" % ord(h) for h in obj_hash)), name)
+ if args.cms:
+ print mft.pprint()
+ for cer in mft.certs():
+ print cer.pprint()
+ for crl in mft.crls():
+ print crl.pprint()
+ print
diff --git a/rp/utils/scan_roas b/rp/utils/scan_roas
index 4f3dc7f0..f4489f32 100755
--- a/rp/utils/scan_roas
+++ b/rp/utils/scan_roas
@@ -27,29 +27,29 @@ import argparse
import rpki.POW
def check_dir(d):
- if not os.path.isdir(d):
- raise argparse.ArgumentTypeError("%r is not a directory" % d)
- return d
+ if not os.path.isdir(d):
+ raise argparse.ArgumentTypeError("%r is not a directory" % d)
+ return d
class ROA(rpki.POW.ROA): # pylint: disable=W0232
- @classmethod
- def parse(cls, fn): # pylint: disable=W0621
- self = cls.derReadFile(fn)
- self.extractWithoutVerifying()
- return self
+ @classmethod
+ def parse(cls, fn): # pylint: disable=W0621
+ self = cls.derReadFile(fn)
+ self.extractWithoutVerifying()
+ return self
- @property
- def prefixes(self):
- v4, v6 = self.getPrefixes()
- for prefix, length, maxlength in (v4 or ()) + (v6 or ()):
- if maxlength is None or length == maxlength:
- yield "%s/%d" % (prefix, length)
- else:
- yield "%s/%d-%d" % (prefix, length, maxlength)
+ @property
+ def prefixes(self):
+ v4, v6 = self.getPrefixes()
+ for prefix, length, maxlength in (v4 or ()) + (v6 or ()):
+ if maxlength is None or length == maxlength:
+ yield "%s/%d" % (prefix, length)
+ else:
+ yield "%s/%d-%d" % (prefix, length, maxlength)
- def __str__(self):
- return "%s %s %s" % (self.signingTime(), self.getASID(), " ".join(self.prefixes))
+ def __str__(self):
+ return "%s %s %s" % (self.signingTime(), self.getASID(), " ".join(self.prefixes))
parser = argparse.ArgumentParser(description = __doc__)
parser.add_argument("rcynic_dir", nargs = "+", type = check_dir,
@@ -57,7 +57,7 @@ parser.add_argument("rcynic_dir", nargs = "+", type = check_dir,
args = parser.parse_args()
for rcynic_dir in args.rcynic_dir:
- for root, dirs, files in os.walk(rcynic_dir):
- for fn in files:
- if fn.endswith(".roa"):
- print ROA.parse(os.path.join(root, fn))
+ for root, dirs, files in os.walk(rcynic_dir):
+ for fn in files:
+ if fn.endswith(".roa"):
+ print ROA.parse(os.path.join(root, fn))
diff --git a/rp/utils/scan_routercerts b/rp/utils/scan_routercerts
index 081a6293..9a13d7a9 100755
--- a/rp/utils/scan_routercerts
+++ b/rp/utils/scan_routercerts
@@ -28,9 +28,9 @@ import rpki.POW
import rpki.oids
def check_dir(s):
- if not os.path.isdir(s):
- raise argparse.ArgumentTypeError("%r is not a directory" % s)
- return s
+ if not os.path.isdir(s):
+ raise argparse.ArgumentTypeError("%r is not a directory" % s)
+ return s
parser = argparse.ArgumentParser(description = __doc__)
parser.add_argument("rcynic_dir", type = check_dir, help = "rcynic authenticated output directory")
@@ -38,20 +38,20 @@ args = parser.parse_args()
for root, dirs, files in os.walk(args.rcynic_dir):
- for fn in files:
+ for fn in files:
- if not fn.endswith(".cer"):
- continue
+ if not fn.endswith(".cer"):
+ continue
- x = rpki.POW.X509.derReadFile(os.path.join(root, fn))
+ x = rpki.POW.X509.derReadFile(os.path.join(root, fn))
- if rpki.oids.id_kp_bgpsec_router not in (x.getEKU() or ()):
- continue
+ if rpki.oids.id_kp_bgpsec_router not in (x.getEKU() or ()):
+ continue
- sys.stdout.write(base64.urlsafe_b64encode(x.getSKI()).rstrip("="))
+ sys.stdout.write(base64.urlsafe_b64encode(x.getSKI()).rstrip("="))
- for min_asn, max_asn in x.getRFC3779()[0]:
- for asn in xrange(min_asn, max_asn + 1):
- sys.stdout.write(" %s" % asn)
+ for min_asn, max_asn in x.getRFC3779()[0]:
+ for asn in xrange(min_asn, max_asn + 1):
+ sys.stdout.write(" %s" % asn)
- sys.stdout.write(" %s\n" % base64.b64encode(x.getPublicKey().derWritePublic()))
+ sys.stdout.write(" %s\n" % base64.b64encode(x.getPublicKey().derWritePublic()))
diff --git a/rp/utils/uri b/rp/utils/uri
index df6e710b..4fecf73a 100755
--- a/rp/utils/uri
+++ b/rp/utils/uri
@@ -29,44 +29,44 @@ import rpki.POW
class Certificate(object):
- @staticmethod
- def first_whatever(uris, prefix):
- if uris is not None:
- for uri in uris:
- if uri.startswith(prefix):
- return uri
- return None
+ @staticmethod
+ def first_whatever(uris, prefix):
+ if uris is not None:
+ for uri in uris:
+ if uri.startswith(prefix):
+ return uri
+ return None
- def first_rsync(self, uris):
- return self.first_whatever(uris, "rsync://")
+ def first_rsync(self, uris):
+ return self.first_whatever(uris, "rsync://")
- def first_http(self, uris):
- return self.first_whatever(uris, "http://")
+ def first_http(self, uris):
+ return self.first_whatever(uris, "http://")
- def __init__(self, fn):
- try:
- x = rpki.POW.X509.derReadFile(fn)
- except: # pylint: disable=W0702
- try:
- cms = rpki.POW.CMS.derReadFile(fn)
- cms.extractWithoutVerifying()
- x = cms.certs()[0]
- except:
- raise ValueError
- sia = x.getSIA() or (None, None, None, None)
- self.fn = fn
- self.uris = (
- ("AIA:caIssuers", self.first_rsync(x.getAIA())),
- ("SIA:caRepository", self.first_rsync(sia[0])),
- ("SIA:rpkiManifest", self.first_rsync(sia[1])),
- ("SIA:signedObject", self.first_rsync(sia[2])),
- ("SIA:rpkiNotify", self.first_http(sia[3])),
- ("CRLDP", self.first_rsync(x.getCRLDP())))
+ def __init__(self, fn):
+ try:
+ x = rpki.POW.X509.derReadFile(fn)
+ except: # pylint: disable=W0702
+ try:
+ cms = rpki.POW.CMS.derReadFile(fn)
+ cms.extractWithoutVerifying()
+ x = cms.certs()[0]
+ except:
+ raise ValueError
+ sia = x.getSIA() or (None, None, None, None)
+ self.fn = fn
+ self.uris = (
+ ("AIA:caIssuers", self.first_rsync(x.getAIA())),
+ ("SIA:caRepository", self.first_rsync(sia[0])),
+ ("SIA:rpkiManifest", self.first_rsync(sia[1])),
+ ("SIA:signedObject", self.first_rsync(sia[2])),
+ ("SIA:rpkiNotify", self.first_http(sia[3])),
+ ("CRLDP", self.first_rsync(x.getCRLDP())))
- def __str__(self):
- words = [self.fn] if args.single_line else ["File: " + self.fn]
- words.extend(" %s: %s" % (tag, uri) for tag, uri in self.uris if uri is not None)
- return ("" if args.single_line else "\n").join(words)
+ def __str__(self):
+ words = [self.fn] if args.single_line else ["File: " + self.fn]
+ words.extend(" %s: %s" % (tag, uri) for tag, uri in self.uris if uri is not None)
+ return ("" if args.single_line else "\n").join(words)
parser = argparse.ArgumentParser(description = __doc__)
parser.add_argument("-s", "--single-line", action = "store_true", help = "single output line per object")
@@ -74,4 +74,4 @@ parser.add_argument("certs", nargs = "+", type = Certificate, help = "RPKI objec
args = parser.parse_args()
for cert in args.certs:
- print cert
+ print cert