author    Rob Austein <sra@hactrn.net>    2012-03-19 21:14:48 +0000
committer Rob Austein <sra@hactrn.net>    2012-03-19 21:14:48 +0000
commit    8e3eb0fc2b371e1eb0f93afad97ff6cd53e7d78b (patch)
tree      fb5df7edce1c65230beb4770de2d0561d7387cd8
parent    4950a77da61c9993788c0557ef362ecdc0a1cf18 (diff)
parent    d559bc6a29311fa1c414ce8cc3632f5f74ee8485 (diff)

Sync from trunk.

svn path=/branches/tk161/; revision=4403
-rw-r--r--  openssl/Makefile.in                  4
-rw-r--r--  openssl/openssl-1.0.0f.tar.gz        bin 4043367 -> 0 bytes
-rw-r--r--  openssl/openssl-1.0.0h.tar.gz        bin 0 -> 4048067 bytes
-rwxr-xr-x  openssl/update-snapshot.sh           2
-rw-r--r--  rcynic/rcynic.c                      62
-rw-r--r--  rcynic/rpki-torrent.py               701
-rw-r--r--  rpkid/left-right-schema.rnc          2
-rw-r--r--  rpkid/left-right-schema.rng          4
-rw-r--r--  rpkid/rpki/http.py                   9
-rw-r--r--  rpkid/rpki/left_right.py             100
-rw-r--r--  rpkid/rpki/publication.py            11
-rw-r--r--  rpkid/rpki/relaxng.py                4
-rw-r--r--  rpkid/rpki/rootd.py                  73
-rw-r--r--  rpkid/rpki/rpkid.py                  78
-rw-r--r--  rpkid/tests/revoke.yaml              198
-rw-r--r--  rpkid/tests/smoketest.py             1
-rwxr-xr-x  rtr-origin/rtr-origin.py             10
-rw-r--r--  scripts/analyze-rcynic-history.py    149
-rw-r--r--  scripts/roa-to-irr.py                41
-rw-r--r--  scripts/testbed-rootcert.py          2
20 files changed, 1283 insertions, 168 deletions
diff --git a/openssl/Makefile.in b/openssl/Makefile.in
index b5085c17..6498ed30 100644
--- a/openssl/Makefile.in
+++ b/openssl/Makefile.in
@@ -1,6 +1,6 @@
# $Id$
-VERSION = 1.0.0f
+VERSION = 1.0.0h
OPENSSL_CONFIG_COMMAND = @OPENSSL_CONFIG_COMMAND@
OPENSSL_BUILD_DIRECTORY = ${abs_builddir}/openssl
@@ -56,7 +56,7 @@ distclean: clean
openssl-${VERSION}/Makefile: openssl-${VERSION}/config
cd openssl-${VERSION}; ${OPENSSL_CONFIG_COMMAND} ${OPTIONS}
- cd openssl-${VERSION}; ${MAKE} depend
+ -cd openssl-${VERSION}; ${MAKE} depend
openssl-${VERSION}/config: openssl-${VERSION}.tar.gz
gzip -c -d openssl-${VERSION}.tar.gz | tar -xf -
diff --git a/openssl/openssl-1.0.0f.tar.gz b/openssl/openssl-1.0.0f.tar.gz
deleted file mode 100644
index 7bce05eb..00000000
--- a/openssl/openssl-1.0.0f.tar.gz
+++ /dev/null
Binary files differ
diff --git a/openssl/openssl-1.0.0h.tar.gz b/openssl/openssl-1.0.0h.tar.gz
new file mode 100644
index 00000000..516416bd
--- /dev/null
+++ b/openssl/openssl-1.0.0h.tar.gz
Binary files differ
diff --git a/openssl/update-snapshot.sh b/openssl/update-snapshot.sh
index 94932f59..3320caeb 100755
--- a/openssl/update-snapshot.sh
+++ b/openssl/update-snapshot.sh
@@ -6,7 +6,7 @@
#version="1.0.0-stable-SNAP-$(date +%Y%m%d)"
-version="1.0.0f"
+version="1.0.0h"
tarball="openssl-${version}.tar.gz"
diff --git a/rcynic/rcynic.c b/rcynic/rcynic.c
index 400f53a2..d22e95d4 100644
--- a/rcynic/rcynic.c
+++ b/rcynic/rcynic.c
@@ -1620,22 +1620,35 @@ static int walk_ctx_loop_done(STACK_OF(walk_ctx_t) *wsk)
* context which collectively define the current pass, product URI,
* etc, and we want to be able to iterate through this sequence via
* the event system. So this function steps to the next state.
+ *
+ * Conceptually, w->manifest->fileList and w->filenames form a single
+ * array with index w->manifest_iteration + w->filename_iteration.
+ * Beware of fencepost errors, I've gotten this wrong once already.
+ * Slightly odd coding here is to make it easier to check this.
*/
static void walk_ctx_loop_next(const rcynic_ctx_t *rc, STACK_OF(walk_ctx_t) *wsk)
{
walk_ctx_t *w = walk_ctx_stack_head(wsk);
+ int n_manifest, n_filenames;
assert(rc && wsk && w);
- if (w->manifest && w->manifest_iteration + 1 < sk_FileAndHash_num(w->manifest->fileList)) {
- w->manifest_iteration++;
- return;
+ assert(w->manifest_iteration >= 0 && w->filename_iteration >= 0);
+
+ n_manifest = w->manifest ? sk_FileAndHash_num(w->manifest->fileList) : 0;
+ n_filenames = w->filenames ? sk_OPENSSL_STRING_num(w->filenames) : 0;
+
+ if (w->manifest_iteration + w->filename_iteration < n_manifest + n_filenames) {
+ if (w->manifest_iteration < n_manifest)
+ w->manifest_iteration++;
+ else
+ w->filename_iteration++;
}
- if (w->filenames && w->filename_iteration + 1 < sk_OPENSSL_STRING_num(w->filenames)) {
- w->filename_iteration++;
+ assert(w->manifest_iteration <= n_manifest && w->filename_iteration <= n_filenames);
+
+ if (w->manifest_iteration + w->filename_iteration < n_manifest + n_filenames)
return;
- }
while (!walk_ctx_loop_done(wsk)) {
w->state++;
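
Illustrative aside (not part of the patch): the comment above treats the manifest fileList and the extra filenames as a single logical array indexed by the sum of the two cursors. A minimal Python sketch of the same stepping rule, with made-up names:

    def loop_next(state, manifest_entries, extra_filenames):
        # Advance one combined cursor over two sequences: exhaust the
        # manifest entries first, then the extra filenames, exactly as
        # walk_ctx_loop_next() does above.
        n_manifest  = len(manifest_entries)
        n_filenames = len(extra_filenames)
        if state["manifest_iteration"] + state["filename_iteration"] < n_manifest + n_filenames:
            if state["manifest_iteration"] < n_manifest:
                state["manifest_iteration"] += 1
            else:
                state["filename_iteration"] += 1
        # True while the combined index is still in range, i.e. there is
        # another product left to process on this pass.
        return state["manifest_iteration"] + state["filename_iteration"] < n_manifest + n_filenames
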
@@ -2878,6 +2891,7 @@ static int check_aki(rcynic_ctx_t *rc,
if (ASN1_OCTET_STRING_cmp(aki->keyid, issuer->skid)) {
log_validation_status(rc, uri, aki_extension_issuer_mismatch, generation);
+ return 0;
}
return 1;
@@ -3500,11 +3514,6 @@ static int check_x509(rcynic_ctx_t *rc,
goto done;
}
- if ((issuer_pkey = X509_get_pubkey(w->cert)) == NULL || X509_verify(x, issuer_pkey) <= 0) {
- log_validation_status(rc, uri, certificate_bad_signature, generation);
- goto done;
- }
-
if (x->akid) {
ex_count--;
if (!check_aki(rc, uri, w->cert, x->akid, generation))
@@ -3516,6 +3525,11 @@ static int check_x509(rcynic_ctx_t *rc,
goto done;
}
+ if ((issuer_pkey = X509_get_pubkey(w->cert)) == NULL || X509_verify(x, issuer_pkey) <= 0) {
+ log_validation_status(rc, uri, certificate_bad_signature, generation);
+ goto done;
+ }
+
if (certinfo->ta) {
if (certinfo->crldp.s[0]) {
@@ -4591,6 +4605,7 @@ static X509 *read_ta(rcynic_ctx_t *rc,
EVP_PKEY_free(xpkey);
if (match)
return x;
+ log_validation_status(rc, uri, object_rejected, generation);
X509_free(x);
return NULL;
}
@@ -4722,6 +4737,7 @@ int main(int argc, char *argv[])
{
int opt_jitter = 0, use_syslog = 0, use_stderr = 0, syslog_facility = 0;
int opt_syslog = 0, opt_stderr = 0, opt_level = 0, prune = 1;
+ int opt_auth = 0, opt_unauth = 0, keep_lockfile = 0;
char *cfg_file = "rcynic.conf";
char *lockfile = NULL, *xmlfile = NULL;
int c, i, j, ret = 1, jitter = 600, lockfd = -1;
@@ -4768,8 +4784,13 @@ int main(int argc, char *argv[])
OpenSSL_add_all_algorithms();
ERR_load_crypto_strings();
- while ((c = getopt(argc, argv, "c:l:sej:V")) > 0) {
+ while ((c = getopt(argc, argv, "a:c:l:sej:u:V")) > 0) {
switch (c) {
+ case 'a':
+ opt_auth = 1;
+ if (!set_directory(&rc, &rc.authenticated, optarg, 0))
+ goto done;
+ break;
case 'c':
cfg_file = optarg;
break;
@@ -4789,6 +4810,11 @@ int main(int argc, char *argv[])
goto done;
opt_jitter = 1;
break;
+ case 'u':
+ opt_unauth = 1;
+ if (!set_directory(&rc, &rc.unauthenticated, optarg, 1))
+ goto done;
+ break;
case 'V':
puts(svn_id);
ret = 0;
@@ -4838,11 +4864,13 @@ int main(int argc, char *argv[])
assert(val && val->name && val->value);
- if (!name_cmp(val->name, "authenticated") &&
+ if (!opt_auth &&
+ !name_cmp(val->name, "authenticated") &&
!set_directory(&rc, &rc.authenticated, val->value, 0))
goto done;
- else if (!name_cmp(val->name, "unauthenticated") &&
+ else if (!opt_unauth &&
+ !name_cmp(val->name, "unauthenticated") &&
!set_directory(&rc, &rc.unauthenticated, val->value, 1))
goto done;
@@ -4864,6 +4892,10 @@ int main(int argc, char *argv[])
else if (!name_cmp(val->name, "lockfile"))
lockfile = strdup(val->value);
+ else if (!name_cmp(val->name, "keep-lockfile") &&
+ !configure_boolean(&rc, &keep_lockfile, val->value))
+ goto done;
+
else if (!opt_jitter &&
!name_cmp(val->name, "jitter") &&
!configure_integer(&rc, &jitter, val->value))
@@ -5205,7 +5237,7 @@ int main(int argc, char *argv[])
ERR_free_strings();
if (rc.rsync_program)
free(rc.rsync_program);
- if (lockfile && lockfd >= 0)
+ if (lockfile && lockfd >= 0 && !keep_lockfile)
unlink(lockfile);
if (lockfile)
free(lockfile);
diff --git a/rcynic/rpki-torrent.py b/rcynic/rpki-torrent.py
new file mode 100644
index 00000000..1dfc26e7
--- /dev/null
+++ b/rcynic/rpki-torrent.py
@@ -0,0 +1,701 @@
+#!/usr/local/bin/python
+
+"""
+$Id$
+
+Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC")
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+"""
+
+import urllib2
+import httplib
+import socket
+import ssl
+import urlparse
+import zipfile
+import sys
+import os
+import email.utils
+import base64
+import hashlib
+import subprocess
+import syslog
+import traceback
+import ConfigParser
+import stat
+import time
+import errno
+import fcntl
+import getopt
+import smtplib
+import email.mime.text
+
+import transmissionrpc
+
+tr_env_vars = ("TR_TORRENT_DIR", "TR_TORRENT_ID", "TR_TORRENT_NAME")
+
+class WrongServer(Exception):
+ "Hostname not in X.509v3 subjectAltName extension."
+
+class UnexpectedRedirect(Exception):
+ "Unexpected HTTP redirect."
+
+class WrongMode(Exception):
+ "Wrong operation for mode."
+
+class BadFormat(Exception):
+ "Zip file does not match our expectations."
+
+class InconsistentEnvironment(Exception):
+ "Environment variables received from Transmission aren't consistent."
+
+class TorrentNotReady(Exception):
+ "Torrent is not ready for checking."
+
+class TorrentDoesNotMatchManifest(Exception):
+ "Retrieved torrent does not match manifest."
+
+class TorrentNameDoesNotMatchURL(Exception):
+ "Torrent name doesn't uniquely match a URL."
+
+class CouldNotFindTorrents(Exception):
+ "Could not find torrent(s) with given name(s)."
+
+class UseTheSourceLuke(Exception):
+ "Use The Source, Luke."
+
+def main():
+ try:
+ syslog_flags = syslog.LOG_PID;
+ if os.isatty(sys.stderr.fileno()):
+ syslog_flags |= syslog.LOG_PERROR
+ syslog.openlog("rpki-torrent", syslog_flags)
+
+ cfg_file = [os.path.join(dn, fn)
+ for fn in ("rcynic.conf", "rpki.conf")
+ for dn in ("/var/rcynic/etc", "/usr/local/etc", "/etc")]
+
+ opts, argv = getopt.getopt(sys.argv[1:], "c:h?", ["config=", "help"])
+ for o, a in opts:
+ if o in ("-h", "--help", "-?"):
+ raise UseTheSourceLuke
+ elif o in ("-c", "--config"):
+ cfg_file = a
+
+ global cfg
+ cfg = MyConfigParser()
+ cfg.read(cfg_file)
+
+ if cfg.act_as_generator:
+ if len(argv) == 1 and argv[0] == "generate":
+ generator_main()
+ elif len(argv) == 1 and argv[0] == "mirror":
+ mirror_main()
+ else:
+ raise UseTheSourceLuke
+
+ else:
+ if len(argv) == 0 and all(v in os.environ for v in tr_env_vars):
+ torrent_completion_main()
+ elif len(argv) == 1 and argv[0] == "poll":
+ poll_main()
+ else:
+ raise UseTheSourceLuke
+
+ except Exception, e:
+ for line in traceback.format_exc().splitlines():
+ syslog.syslog(line)
+ sys.exit(1)
+
+
+def generator_main():
+ import paramiko
+
+ class SFTPClient(paramiko.SFTPClient):
+ def atomic_rename(self, oldpath, newpath):
+ oldpath = self._adjust_cwd(oldpath)
+ newpath = self._adjust_cwd(newpath)
+ self._log(paramiko.common.DEBUG, 'atomic_rename(%r, %r)' % (oldpath, newpath))
+ self._request(paramiko.sftp.CMD_EXTENDED, "posix-rename@openssh.com", oldpath, newpath)
+
+ z = ZipFile(url = cfg.generate_url, dir = cfg.zip_dir)
+ client = TransmissionClient()
+
+ client.remove_torrents(z.torrent_name)
+
+ download_dir = client.get_session().download_dir
+ torrent_dir = os.path.join(download_dir, z.torrent_name)
+ torrent_file = os.path.join(cfg.zip_dir, z.torrent_name + ".torrent")
+
+
+ syslog.syslog("Synchronizing local data from %s to %s" % (cfg.unauthenticated, torrent_dir))
+ subprocess.check_call((cfg.rsync_prog, "--archive", "--delete",
+ os.path.normpath(cfg.unauthenticated) + "/",
+ os.path.normpath(torrent_dir) + "/"))
+
+ syslog.syslog("Creating %s" % torrent_file)
+ try:
+ os.unlink(torrent_file)
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
+ ignore_output_for_now = subprocess.check_output(
+ (cfg.mktorrent_prog,
+ "-a", cfg.tracker_url,
+ "-c", "RPKI unauthenticated data snapshot generated by rpki-torrent",
+ "-o", torrent_file,
+ torrent_dir))
+
+ syslog.syslog("Generating manifest")
+ manifest = create_manifest(download_dir, z.torrent_name)
+
+ syslog.syslog("Loading %s with unlimited seeding" % torrent_file)
+ f = open(torrent_file, "rb")
+ client.add(base64.b64encode(f.read()))
+ f.close()
+ client.unlimited_seeding(z.torrent_name)
+
+ syslog.syslog("Creating upload connection")
+ ssh = paramiko.Transport((cfg.sftp_host, cfg.sftp_port))
+ ssh.connect(
+ username = cfg.sftp_user,
+ hostkey = paramiko.util.load_host_keys(cfg.sftp_hostkey_file)[cfg.sftp_host]["ssh-rsa"],
+ pkey = paramiko.RSAKey.from_private_key_file(cfg.sftp_private_key_file))
+ sftp = SFTPClient.from_transport(ssh)
+
+ zip_filename = os.path.join("data", os.path.basename(z.filename))
+ zip_tempname = zip_filename + ".new"
+
+ syslog.syslog("Creating %s" % zip_tempname)
+ f = sftp.open(zip_tempname, "wb")
+ z.set_output_stream(f)
+
+ syslog.syslog("Writing %s to zip" % torrent_file)
+ z.write(
+ torrent_file,
+ arcname = os.path.basename(torrent_file),
+ compress_type = zipfile.ZIP_DEFLATED)
+
+ manifest_name = z.torrent_name + ".manifest"
+
+ syslog.syslog("Writing %s to zip" % manifest_name)
+ zi = zipfile.ZipInfo(manifest_name, time.gmtime()[:6])
+ zi.external_attr = (stat.S_IFREG | 0644) << 16
+ zi.internal_attr = 1 # Text, not binary
+ z.writestr(zi,
+ "".join("%s %s\n" % (v, k) for k, v in manifest.iteritems()),
+ zipfile.ZIP_DEFLATED)
+
+ syslog.syslog("Closing %s and renaming to %s" % (zip_tempname, zip_filename))
+ z.close()
+ f.close()
+ sftp.atomic_rename(zip_tempname, zip_filename)
+
+ syslog.syslog("Closing upload connection")
+ sftp.close()
+
+def mirror_main():
+ client = TransmissionClient()
+ torrent_names = []
+
+ for zip_url in cfg.zip_urls:
+ if zip_url != cfg.generate_url:
+ z = ZipFile(url = zip_url, dir = cfg.zip_dir, ta = cfg.zip_ta)
+ if z.fetch():
+ client.remove_torrents(z.torrent_name)
+ syslog.syslog("Mirroring torrent %s" % z.torrent_name)
+ client.add(z.get_torrent())
+ torrent_names.append(z.torrent_name)
+
+ if torrent_names:
+ client.unlimited_seeding(*torrent_names)
+
+
+def poll_main():
+ for zip_url in cfg.zip_urls:
+
+ z = ZipFile(url = zip_url, dir = cfg.zip_dir, ta = cfg.zip_ta)
+ client = TransmissionClient()
+
+ if z.fetch():
+ client.remove_torrents(z.torrent_name)
+ syslog.syslog("Adding torrent %s" % z.torrent_name)
+ client.add(z.get_torrent())
+
+ elif cfg.run_rcynic_anyway:
+ run_rcynic(client, z)
+
+
+def torrent_completion_main():
+ torrent_name = os.getenv("TR_TORRENT_NAME")
+ torrent_id = int(os.getenv("TR_TORRENT_ID"))
+
+ z = ZipFile(url = cfg.find_url(torrent_name), dir = cfg.zip_dir, ta = cfg.zip_ta)
+ client = TransmissionClient()
+ torrent = client.info([torrent_id]).popitem()[1]
+
+ if torrent.name != torrent_name:
+ raise InconsistentEnvironment("Torrent name %s does not match ID %d" % (torrent_name, torrent_id))
+
+ if z.torrent_name != torrent_name:
+ raise InconsistentEnvironment("Torrent name %s does not match torrent name in zip file %s" % (torrent_name, z.torrent_name))
+
+ if torrent is None or torrent.progress != 100:
+ raise TorrentNotReady("Torrent %s not ready for checking, how did I get here?" % torrent_name)
+
+ log_email("Download complete %s" % z.url)
+
+ run_rcynic(client, z)
+
+
+def run_rcynic(client, z):
+ """
+ Run rcynic and any post-processing we might want.
+ """
+
+ if cfg.lockfile is not None:
+ syslog.syslog("Acquiring lock %s" % cfg.lockfile)
+ lock = os.open(cfg.lockfile, os.O_WRONLY | os.O_CREAT, 0600)
+ fcntl.flock(lock, fcntl.LOCK_EX)
+ else:
+ lock = None
+
+ syslog.syslog("Checking manifest against disk")
+
+ download_dir = client.get_session().download_dir
+
+ manifest_from_disk = create_manifest(download_dir, z.torrent_name)
+ manifest_from_zip = z.get_manifest()
+
+ excess_files = set(manifest_from_disk) - set(manifest_from_zip)
+ for fn in excess_files:
+ del manifest_from_disk[fn]
+
+ if manifest_from_disk != manifest_from_zip:
+ raise TorrentDoesNotMatchManifest("Manifest for torrent %s does not match what we got" %
+ z.torrent_name)
+
+ if excess_files:
+ syslog.syslog("Cleaning up excess files")
+ for fn in excess_files:
+ os.unlink(os.path.join(download_dir, fn))
+
+ syslog.syslog("Running rcynic")
+ log_email("Starting rcynic %s" % z.url)
+ subprocess.check_call((cfg.rcynic_prog,
+ "-c", cfg.rcynic_conf,
+ "-u", os.path.join(client.get_session().download_dir, z.torrent_name)))
+ log_email("Completed rcynic %s" % z.url)
+
+ for cmd in cfg.post_rcynic_commands:
+ syslog.syslog("Running post-rcynic command: %s" % cmd)
+ subprocess.check_call(cmd, shell = True)
+
+ if lock is not None:
+ syslog.syslog("Releasing lock %s" % cfg.lockfile)
+ os.close(lock)
+
+# See http://www.minstrel.org.uk/papers/sftp/ for details on how to
+# set up safe upload-only SFTP directories on the server. In
+# particular http://www.minstrel.org.uk/papers/sftp/builtin/ is likely
+# to be the right path.
+
+
+class ZipFile(object):
+ """
+ Augmented version of standard python zipfile.ZipFile class, with
+ some extra methods and specialized capabilities.
+
+ All methods of the standard zipfile.ZipFile class are supported, but
+ the constructor arguments are different, and opening the zip file
+ itself is deferred until a call which requires this, since the file
+ may first need to be fetched via HTTPS.
+ """
+
+ def __init__(self, url, dir, ta = None, verbose = True):
+ self.url = url
+ self.dir = dir
+ self.ta = ta
+ self.verbose = verbose
+ self.filename = os.path.join(dir, os.path.basename(url))
+ self.changed = False
+ self.zf = None
+ self.peercert = None
+ self.torrent_name, zip_ext = os.path.splitext(os.path.basename(url))
+ if zip_ext != ".zip":
+ raise BadFormat
+
+
+ def __getattr__(self, name):
+ if self.zf is None:
+ self.zf = zipfile.ZipFile(self.filename)
+ return getattr(self.zf, name)
+
+
+ def build_opener(self):
+ """
+ Voodoo to create a urllib2.OpenerDirector object with TLS
+ certificate checking enabled and a hook to set self.peercert so
+ our caller can check the subjectAltName field.
+
+ You probably don't want to look at this if you can avoid it.
+ """
+
+ assert self.ta is not None
+
+ # Yes, we're constructing one-off classes. Look away, look away.
+
+ class HTTPSConnection(httplib.HTTPSConnection):
+ zip = self
+ def connect(self):
+ sock = socket.create_connection((self.host, self.port), self.timeout)
+ if getattr(self, "_tunnel_host", None):
+ self.sock = sock
+ self._tunnel()
+ self.sock = ssl.wrap_socket(sock,
+ keyfile = self.key_file,
+ certfile = self.cert_file,
+ cert_reqs = ssl.CERT_REQUIRED,
+ ssl_version = ssl.PROTOCOL_TLSv1,
+ ca_certs = self.zip.ta)
+ self.zip.peercert = self.sock.getpeercert()
+
+ class HTTPSHandler(urllib2.HTTPSHandler):
+ def https_open(self, req):
+ return self.do_open(HTTPSConnection, req)
+
+ return urllib2.build_opener(HTTPSHandler)
+
+
+ def check_subjectAltNames(self):
+ """
+ Check self.peercert against URL to make sure we were talking to
+ the right HTTPS server.
+ """
+
+ hostname = urlparse.urlparse(self.url).hostname
+ subjectAltNames = set(i[1]
+ for i in self.peercert.get("subjectAltName", ())
+ if i[0] == "DNS")
+ if hostname not in subjectAltNames:
+ raise WrongServer
+
+
+ def download_file(self, r, bufsize = 4096):
+ """
+ Download the file to disk.
+ """
+
+ tempname = self.filename + ".new"
+ f = open(tempname, "wb")
+ n = int(r.info()["Content-Length"])
+ for i in xrange(0, n - bufsize, bufsize):
+ f.write(r.read(bufsize))
+ f.write(r.read())
+ f.close()
+ mtime = email.utils.mktime_tz(email.utils.parsedate_tz(r.info()["Last-Modified"]))
+ os.utime(tempname, (mtime, mtime))
+ os.rename(tempname, self.filename)
+
+
+ def set_output_stream(self, stream):
+ """
+ Set up this zip file for writing to a network stream.
+ """
+
+ assert self.zf is None
+ self.zf = zipfile.ZipFile(stream, "w")
+
+
+ def fetch(self):
+ """
+ Fetch zip file from URL given to constructor.
+ """
+
+ headers = { "User-Agent" : "rpki-torrent" }
+ try:
+ headers["If-Modified-Since"] = email.utils.formatdate(
+ os.path.getmtime(self.filename), False, True)
+ except OSError:
+ pass
+
+ syslog.syslog("Checking %s..." % self.url)
+ try:
+ r = self.build_opener().open(urllib2.Request(self.url, None, headers))
+ syslog.syslog("%s has changed, starting download" % self.url)
+ self.changed = True
+ log_email("Downloading %s" % self.url)
+ except urllib2.HTTPError, e:
+ if e.code != 304:
+ raise
+ r = None
+ syslog.syslog("%s has not changed" % self.url)
+
+ self.check_subjectAltNames()
+
+ if r is not None and r.geturl() != self.url:
+ raise UnexpectedRedirect
+
+ if r is not None:
+ self.download_file(r)
+ r.close()
+
+ return self.changed
+
+
+ def check_format(self):
+ """
+ Make sure that format of zip file matches our preconceptions: it
+ should contain two files, one of which is the .torrent file, the
+ other is the manifest, with names derived from the torrent name
+ inferred from the URL.
+ """
+
+ if set(self.namelist()) != set((self.torrent_name + ".torrent", self.torrent_name + ".manifest")):
+ raise BadFormat
+
+
+ def get_torrent(self):
+ """
+ Extract torrent file from zip file, encoded in Base64 because
+ that's what the transmissionrpc library says it wants.
+ """
+
+ self.check_format()
+ return base64.b64encode(self.read(self.torrent_name + ".torrent"))
+
+
+ def get_manifest(self):
+ """
+ Extract manifest from zip file, as a dictionary.
+
+ For the moment we're fixing up the internal file names from the
+ format that the existing shell-script prototype uses, but this
+ should go away once this program both generates and checks the
+ manifests.
+ """
+
+ self.check_format()
+ result = {}
+ for line in self.open(self.torrent_name + ".manifest"):
+ h, fn = line.split()
+ #
+ # Fixup for earlier manifest format, this should go away
+ if not fn.startswith(self.torrent_name):
+ fn = os.path.normpath(os.path.join(self.torrent_name, fn))
+ #
+ result[fn] = h
+ return result
+
+
+def create_manifest(topdir, torrent_name):
+ """
+ Generate a manifest, expressed as a dictionary.
+ """
+
+ result = {}
+ topdir = os.path.abspath(topdir)
+ for dirpath, dirnames, filenames in os.walk(os.path.join(topdir, torrent_name)):
+ for filename in filenames:
+ filename = os.path.join(dirpath, filename)
+ f = open(filename, "rb")
+ result[os.path.relpath(filename, topdir)] = hashlib.sha256(f.read()).hexdigest()
+ f.close()
+ return result
+
+
+def log_email(msg, subj = None):
+ try:
+ if not msg.endswith("\n"):
+ msg += "\n"
+ if subj is None:
+ subj = msg.partition("\n")[0]
+ m = email.mime.text.MIMEText(msg)
+ m["Date"] = time.strftime("%d %b %Y %H:%M:%S +0000", time.gmtime())
+ m["From"] = cfg.log_email
+ m["To"] = cfg.log_email
+ m["Subject"] = subj
+ s = smtplib.SMTP("localhost")
+ s.sendmail(cfg.log_email, [cfg.log_email], m.as_string())
+ s.quit()
+ except ConfigParser.Error:
+ pass
+
+
+class TransmissionClient(transmissionrpc.client.Client):
+ """
+ Extension of transmissionrpc.client.Client.
+ """
+
+ def __init__(self, **kwargs):
+ kwargs.setdefault("address", "127.0.0.1")
+ transmissionrpc.client.Client.__init__(self, **kwargs)
+
+
+ def find_torrents(self, *names):
+ """
+ Find torrents with given name(s), return id(s).
+ """
+
+ result = [i for i, t in self.list().iteritems() if t.name in names]
+ if not result:
+ raise CouldNotFindTorrents
+ return result
+
+
+ def remove_torrents(self, *names):
+ """
+ Remove any torrents with the given name(s).
+ """
+
+ try:
+ ids = self.find_torrents(*names)
+ except CouldNotFindTorrents:
+ pass
+ else:
+ syslog.syslog("Removing torrent%s %s (%s)" % (
+ "" if len(ids) == 1 else "s",
+ ", ".join(names),
+ ", ".join("#%s" % i for i in ids)))
+ self.remove(ids)
+
+ def unlimited_seeding(self, *names):
+ """
+ Set unlimited seeding for specified torrents.
+ """
+
+ # Apparently seedRatioMode = 2 means "no limit"
+ try:
+ self.change(self.find_torrents(*names), seedRatioMode = 2)
+ except CouldNotFindTorrents:
+ syslog.syslog("Couldn't tweak seedRatioMode, blundering onwards")
+
+
+class MyConfigParser(ConfigParser.RawConfigParser):
+
+ rpki_torrent_section = "rpki-torrent"
+
+ @property
+ def zip_dir(self):
+ return self.get(self.rpki_torrent_section, "zip_dir")
+
+ @property
+ def zip_ta(self):
+ return self.get(self.rpki_torrent_section, "zip_ta")
+
+ @property
+ def rcynic_prog(self):
+ return self.get(self.rpki_torrent_section, "rcynic_prog")
+
+ @property
+ def rcynic_conf(self):
+ return self.get(self.rpki_torrent_section, "rcynic_conf")
+
+ @property
+ def run_rcynic_anyway(self):
+ return self.getboolean(self.rpki_torrent_section, "run_rcynic_anyway")
+
+ @property
+ def generate_url(self):
+ return self.get(self.rpki_torrent_section, "generate_url")
+
+ @property
+ def act_as_generator(self):
+ try:
+ return self.get(self.rpki_torrent_section, "generate_url") != ""
+ except ConfigParser.Error:
+ return False
+
+ @property
+ def rsync_prog(self):
+ return self.get(self.rpki_torrent_section, "rsync_prog")
+
+ @property
+ def mktorrent_prog(self):
+ return self.get(self.rpki_torrent_section, "mktorrent_prog")
+
+ @property
+ def tracker_url(self):
+ return self.get(self.rpki_torrent_section, "tracker_url")
+
+ @property
+ def sftp_host(self):
+ return self.get(self.rpki_torrent_section, "sftp_host")
+
+ @property
+ def sftp_port(self):
+ try:
+ return self.getint(self.rpki_torrent_section, "sftp_port")
+ except ConfigParser.Error:
+ return 22
+
+ @property
+ def sftp_user(self):
+ return self.get(self.rpki_torrent_section, "sftp_user")
+
+ @property
+ def sftp_hostkey_file(self):
+ return self.get(self.rpki_torrent_section, "sftp_hostkey_file")
+
+ @property
+ def sftp_private_key_file(self):
+ return self.get(self.rpki_torrent_section, "sftp_private_key_file")
+
+ @property
+ def lockfile(self):
+ try:
+ return self.get(self.rpki_torrent_section, "lockfile")
+ except ConfigParser.Error:
+ return None
+
+ @property
+ def unauthenticated(self):
+ try:
+ return self.get(self.rpki_torrent_section, "unauthenticated")
+ except ConfigParser.Error:
+ return self.get("rcynic", "unauthenticated")
+
+ @property
+ def log_email(self):
+ return self.get(self.rpki_torrent_section, "log_email")
+
+ def multioption_iter(self, name, getter = None):
+ if getter is None:
+ getter = self.get
+ if self.has_option(self.rpki_torrent_section, name):
+ yield getter(self.rpki_torrent_section, name)
+ name += "."
+ names = [i for i in self.options(self.rpki_torrent_section) if i.startswith(name) and i[len(name):].isdigit()]
+ names.sort(key = lambda s: int(s[len(name):]))
+ for name in names:
+ yield getter(self.rpki_torrent_section, name)
+
+ @property
+ def zip_urls(self):
+ return self.multioption_iter("zip_url")
+
+ @property
+ def post_rcynic_commands(self):
+ return self.multioption_iter("post_rcynic_command")
+
+ def find_url(self, torrent_name):
+ urls = [u for u in self.zip_urls
+ if os.path.splitext(os.path.basename(u))[0] == torrent_name]
+ if len(urls) != 1:
+ raise TorrentNameDoesNotMatchURL("Can't find URL matching torrent name %s" % torrent_name)
+ return urls[0]
+
+
+if __name__ == "__main__":
+ main()
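
Illustrative aside (not part of the patch): MyConfigParser.multioption_iter collects a base option plus numbered variants (zip_url, zip_url.1, zip_url.2, ...) from the [rpki-torrent] section, yielding the base option first and then the numbered ones in numeric order. A minimal sketch of that behaviour, assuming the MyConfigParser class above is in scope; the URLs are placeholders:

    import StringIO

    sample = StringIO.StringIO("\n".join((
        "[rpki-torrent]",
        "zip_url   = https://example.org/torrents/global.zip",
        "zip_url.1 = https://example.org/torrents/regional.zip",
        "zip_url.2 = https://example.org/torrents/local.zip",
        "")))

    cfg = MyConfigParser()
    cfg.readfp(sample)

    # Prints the base URL first, then the .1 and .2 variants in order.
    for url in cfg.zip_urls:
        print url
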
diff --git a/rpkid/left-right-schema.rnc b/rpkid/left-right-schema.rnc
index 3b058502..a2759f56 100644
--- a/rpkid/left-right-schema.rnc
+++ b/rpkid/left-right-schema.rnc
@@ -247,7 +247,7 @@ list_roa_requests_query = element list_roa_requests {
list_roa_requests_reply = element list_roa_requests {
tag, self_handle,
- attribute asn { xsd:positiveInteger },
+ attribute asn { xsd:nonNegativeInteger },
attribute ipv4 { ipv4_list }?,
attribute ipv6 { ipv6_list }?
}
diff --git a/rpkid/left-right-schema.rng b/rpkid/left-right-schema.rng
index 1ab21668..80beb1f5 100644
--- a/rpkid/left-right-schema.rng
+++ b/rpkid/left-right-schema.rng
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
- $Id: left-right-schema.rnc 3730 2011-03-21 12:42:43Z sra $
+ $Id: left-right-schema.rnc 4346 2012-02-17 01:11:06Z sra $
RelaxNG Schema for RPKI left-right protocol.
@@ -875,7 +875,7 @@
<ref name="tag"/>
<ref name="self_handle"/>
<attribute name="asn">
- <data type="positiveInteger"/>
+ <data type="nonNegativeInteger"/>
</attribute>
<optional>
<attribute name="ipv4">
diff --git a/rpkid/rpki/http.py b/rpkid/rpki/http.py
index 7d7e81ba..a0055ac9 100644
--- a/rpkid/rpki/http.py
+++ b/rpkid/rpki/http.py
@@ -766,7 +766,14 @@ class http_client(http_stream):
self.update_timeout()
if self.msg.code != 200:
- raise rpki.exceptions.HTTPRequestFailed, "HTTP request failed with status %s, reason %s, response %s" % (self.msg.code, self.msg.reason, self.msg.body)
+ errmsg = "HTTP request failed"
+ if self.msg.code is not None:
+ errmsg += " with status %s" % self.msg.code
+ if self.msg.reason:
+ errmsg += ", reason %s" % self.msg.reason
+ if self.msg.body:
+ errmsg += ", response %s" % self.msg.body
+ raise rpki.exceptions.HTTPRequestFailed(errmsg)
self.queue.return_result(self, self.msg, detach = self.expect_close)
def handle_close(self):
diff --git a/rpkid/rpki/left_right.py b/rpkid/rpki/left_right.py
index ac480ff0..17d665c9 100644
--- a/rpkid/rpki/left_right.py
+++ b/rpkid/rpki/left_right.py
@@ -190,6 +190,7 @@ class self_elt(data_elt):
"""
return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))
+
def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb):
"""
Extra server actions for self_elt.
@@ -248,6 +249,16 @@ class self_elt(data_elt):
parent.serve_revoke_forgotten(iterator, eb)
rpki.async.iterator(self.parents, loop, cb)
+ def serve_destroy_hook(self, cb, eb):
+ """
+ Extra cleanup actions when destroying a self_elt.
+ """
+ rpki.log.trace()
+ def loop(iterator, parent):
+ parent.delete(iterator)
+ rpki.async.iterator(self.parents, loop, cb)
+
+
def serve_publish_world_now(self, cb, eb):
"""
Handle a left-right publish_world_now action for this self.
@@ -704,6 +715,7 @@ class self_elt(data_elt):
self.gctx.sql.sweep()
self.gctx.irdb_query_roa_requests(self.self_handle, got_roa_requests, roa_requests_failed)
+
class bsc_elt(data_elt):
"""
<bsc/> (Business Signing Context) element.
@@ -931,6 +943,39 @@ class parent_elt(data_elt):
ca.reissue(cb = iterator, eb = eb)
rpki.async.iterator(self.cas, loop, cb)
+
+ def get_skis(self, cb, eb):
+ """
+ Fetch SKIs that this parent thinks we have. In theory this should
+ agree with our own database, but in practice stuff can happen, so
+ sometimes we need to know what our parent thinks.
+
+ Result is a dictionary with the resource class name as key and a
+ set of SKIs as value.
+ """
+
+ def done(r_msg):
+ cb(dict((rc.class_name, set(c.cert.gSKI() for c in rc.certs))
+ for rc in r_msg.payload.classes))
+
+ rpki.up_down.list_pdu.query(self, done, eb)
+
+
+ def revoke_skis(self, rc_name, skis_to_revoke, cb, eb):
+ """
+ Revoke a set of SKIs within a particular resource class.
+ """
+
+ def loop(iterator, ski):
+ rpki.log.debug("Asking parent %r to revoke class %r, SKI %s" % (self, rc_name, ski))
+ q_pdu = rpki.up_down.revoke_pdu()
+ q_pdu.class_name = rc_name
+ q_pdu.ski = ski
+ self.query_up_down(q_pdu, lambda r_pdu: iterator(), eb)
+
+ rpki.async.iterator(skis_to_revoke, loop, cb)
+
+
def serve_revoke_forgotten(self, cb, eb):
"""
Handle a left-right revoke_forgotten action for this parent.
@@ -945,30 +990,55 @@ class parent_elt(data_elt):
require an explicit trigger.
"""
- def got_list(r_msg):
+ def got_skis(skis_from_parent):
+
+ def loop(iterator, item):
+ rc_name, skis_to_revoke = item
+ if rc_name in ca_map:
+ for ca_detail in ca_map[rc_name].issue_response_candidate_ca_details:
+ skis_to_revoke.discard(ca_detail.latest_ca_cert.gSKI())
+ self.revoke_skis(rc_name, skis_to_revoke, iterator, eb)
ca_map = dict((ca.parent_resource_class, ca) for ca in self.cas)
+ rpki.async.iterator(skis_from_parent.items(), loop, cb)
- def rc_loop(rc_iterator, rc):
+ self.get_skis(got_skis, eb)
- if rc.class_name in ca_map:
- def ski_loop(ski_iterator, ski):
- rpki.log.warn("Revoking certificates missing from our database, class %r, SKI %s" % (rc.class_name, ski))
- rpki.up_down.revoke_pdu.query(ca, ski, lambda x: ski_iterator(), eb)
+ def delete(self, cb, delete_parent = True):
+ """
+ Delete all the CA stuff under this parent, and perhaps the parent
+ itself.
+ """
- ca = ca_map[rc.class_name]
- skis_parent_knows_about = set(c.cert.gSKI() for c in rc.certs)
- skis_ca_knows_about = set(ca_detail.latest_ca_cert.gSKI() for ca_detail in ca.issue_response_candidate_ca_details)
- skis_only_parent_knows_about = skis_parent_knows_about - skis_ca_knows_about
- rpki.async.iterator(skis_only_parent_knows_about, ski_loop, rc_iterator)
+ def loop(iterator, ca):
+ self.gctx.checkpoint()
+ ca.delete(self, iterator)
- else:
- rc_iterator()
+ def revoke():
+ self.gctx.checkpoint()
+ self.serve_revoke_forgotten(done, fail)
+
+ def fail(e):
+ rpki.log.warn("Trouble getting parent to revoke certificates, blundering onwards: %s" % e)
+ done()
- rpki.async.iterator(r_msg.payload.classes, rc_loop, cb)
+ def done():
+ self.gctx.checkpoint()
+ self.gctx.sql.sweep()
+ if delete_parent:
+ self.sql_delete()
+ cb()
+
+ rpki.async.iterator(self.cas, loop, revoke)
+
+
+ def serve_destroy_hook(self, cb, eb):
+ """
+ Extra server actions when destroying a parent_elt.
+ """
- rpki.up_down.list_pdu.query(self, got_list, eb)
+ self.delete(cb, delete_parent = False)
def query_up_down(self, q_pdu, cb, eb):
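
Illustrative aside (not part of the patch): get_skis() returns a dictionary keyed by resource class name whose values are sets of g(SKI) strings the parent believes it has issued to us; serve_revoke_forgotten() then discards every SKI we can still account for locally and hands whatever is left to revoke_skis(). A tiny sketch of that set arithmetic with made-up class names and SKI values:

    # What the parent reports, versus what our own ca_details still hold.
    skis_from_parent = { "class-A" : set(["gSKI-1", "gSKI-2", "gSKI-3"]) }
    skis_we_know     = { "class-A" : set(["gSKI-1"]) }

    for rc_name, skis_to_revoke in skis_from_parent.items():
        # Anything we can still account for stays; the remainder is what
        # revoke_skis() would ask the parent to revoke, one PDU per SKI.
        skis_to_revoke -= skis_we_know.get(rc_name, set())
        print rc_name, sorted(skis_to_revoke)   # class-A ['gSKI-2', 'gSKI-3']
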
diff --git a/rpkid/rpki/publication.py b/rpkid/rpki/publication.py
index 14e3d36a..f60e3af5 100644
--- a/rpkid/rpki/publication.py
+++ b/rpkid/rpki/publication.py
@@ -219,7 +219,7 @@ class publication_object_elt(rpki.xml_utils.base_elt, publication_namespace):
def serve_withdraw(self):
"""
- Withdraw an object.
+ Withdraw an object, then recursively delete empty directories.
"""
rpki.log.info("Withdrawing %s" % self.uri)
filename = self.uri_to_filename()
@@ -230,6 +230,15 @@ class publication_object_elt(rpki.xml_utils.base_elt, publication_namespace):
raise rpki.exceptions.NoObjectAtURI, "No object published at %s" % self.uri
else:
raise
+ min_path_len = len(self.gctx.publication_base.rstrip("/"))
+ dirname = os.path.dirname(filename)
+ while len(dirname) > min_path_len:
+ try:
+ os.rmdir(dirname)
+ except OSError:
+ break
+ else:
+ dirname = os.path.dirname(dirname)
def uri_to_filename(self):
"""
diff --git a/rpkid/rpki/relaxng.py b/rpkid/rpki/relaxng.py
index 24b3ab75..28fbe64a 100644
--- a/rpkid/rpki/relaxng.py
+++ b/rpkid/rpki/relaxng.py
@@ -6,7 +6,7 @@ import lxml.etree
## Parsed RelaxNG left_right schema
left_right = lxml.etree.RelaxNG(lxml.etree.fromstring('''<?xml version="1.0" encoding="UTF-8"?>
<!--
- $Id: left-right-schema.rnc 3730 2011-03-21 12:42:43Z sra $
+ $Id: left-right-schema.rnc 4346 2012-02-17 01:11:06Z sra $
RelaxNG Schema for RPKI left-right protocol.
@@ -881,7 +881,7 @@ left_right = lxml.etree.RelaxNG(lxml.etree.fromstring('''<?xml version="1.0" enc
<ref name="tag"/>
<ref name="self_handle"/>
<attribute name="asn">
- <data type="positiveInteger"/>
+ <data type="nonNegativeInteger"/>
</attribute>
<optional>
<attribute name="ipv4">
diff --git a/rpkid/rpki/rootd.py b/rpkid/rpki/rootd.py
index 8f3e6264..feceffc5 100644
--- a/rpkid/rpki/rootd.py
+++ b/rpkid/rpki/rootd.py
@@ -58,11 +58,15 @@ class issue_pdu(rpki.up_down.issue_pdu):
class revoke_pdu(rpki.up_down.revoke_pdu):
def serve_pdu(self, q_msg, r_msg, ignored, callback, errback):
- rootd.subject_cert = get_subject_cert()
+ rpki.log.debug("Revocation requested for SKI %s" % self.ski)
+ subject_cert = rootd.get_subject_cert()
if subject_cert is None or subject_cert.gSKI() != self.ski:
raise rpki.exceptions.NotInDatabase
+ now = rpki.sundial.now()
+ rootd.revoke_subject_cert(now)
rootd.del_subject_cert()
rootd.del_subject_pkcs10()
+ rootd.generate_crl_and_manifest(now)
r_msg.payload = rpki.up_down.revoke_response_pdu()
r_msg.payload.class_name = self.class_name
r_msg.payload.ski = self.ski
@@ -95,8 +99,6 @@ class cms_msg(rpki.up_down.cms_msg):
class main(object):
- rpki_root_cert = None
-
def get_root_cert(self):
rpki.log.debug("Read root cert %s" % self.rpki_root_cert_file)
self.rpki_root_cert = rpki.x509.X509(Auto_file = self.rpki_root_cert_file)
@@ -172,54 +174,65 @@ class main(object):
rpki.log.info("Generating subject cert with resources " + str(resources))
req_key = pkcs10.getPublicKey()
req_sia = pkcs10.get_SIA()
- crldp = self.rpki_base_uri + self.rpki_root_crl
- serial = now.totimestamp()
+ self.next_serial_number()
subject_cert = self.rpki_root_cert.issue(
keypair = self.rpki_root_key,
subject_key = req_key,
- serial = serial,
+ serial = self.serial_number,
sia = req_sia,
aia = self.rpki_root_cert_uri,
- crldp = crldp,
+ crldp = self.rpki_base_uri + self.rpki_root_crl,
resources = resources,
notAfter = now + self.rpki_subject_lifetime)
+ self.set_subject_cert(subject_cert)
+ self.generate_crl_and_manifest(now)
+ return subject_cert
+
+ def generate_crl_and_manifest(self, now):
+ subject_cert = self.get_subject_cert()
+ self.next_serial_number()
+ self.next_crl_number()
crl = rpki.x509.CRL.generate(
keypair = self.rpki_root_key,
issuer = self.rpki_root_cert,
- serial = serial,
+ serial = self.crl_number,
thisUpdate = now,
nextUpdate = now + self.rpki_subject_lifetime,
- revokedCertificates = ())
+ revokedCertificates = self.revoked)
rpki.log.debug("Writing CRL %s" % (self.rpki_root_dir + self.rpki_root_crl))
f = open(self.rpki_root_dir + self.rpki_root_crl, "wb")
f.write(crl.get_DER())
f.close()
+ manifest_content = [(self.rpki_root_crl, crl)]
+ if subject_cert is not None:
+ manifest_content.append((self.rpki_subject_cert, subject_cert))
manifest_resources = rpki.resource_set.resource_bag.from_inheritance()
manifest_keypair = rpki.x509.RSA.generate()
manifest_cert = self.rpki_root_cert.issue(
keypair = self.rpki_root_key,
subject_key = manifest_keypair.get_RSApublic(),
- serial = serial + 1,
+ serial = self.serial_number,
sia = ((rpki.oids.name2oid["id-ad-signedObject"],
("uri", self.rpki_base_uri + self.rpki_root_manifest)),),
aia = self.rpki_root_cert_uri,
- crldp = crldp,
+ crldp = self.rpki_base_uri + self.rpki_root_crl,
resources = manifest_resources,
notAfter = now + self.rpki_subject_lifetime,
is_ca = False)
manifest = rpki.x509.SignedManifest.build(
- serial = serial,
+ serial = self.crl_number,
thisUpdate = now,
nextUpdate = now + self.rpki_subject_lifetime,
- names_and_objs = [(self.rpki_subject_cert, subject_cert), (self.rpki_root_crl, crl)],
+ names_and_objs = manifest_content,
keypair = manifest_keypair,
certs = manifest_cert)
rpki.log.debug("Writing manifest %s" % (self.rpki_root_dir + self.rpki_root_manifest))
f = open(self.rpki_root_dir + self.rpki_root_manifest, "wb")
f.write(manifest.get_DER())
f.close()
- self.set_subject_cert(subject_cert)
- return subject_cert
+
+ def revoke_subject_cert(self, now):
+ self.revoked.append((self.get_subject_cert().getSerial(), now.toASN1tuple(), ()))
def compose_response(self, r_msg, pkcs10 = None):
subject_cert = self.issue_subject_cert_maybe(pkcs10)
@@ -260,11 +273,39 @@ class main(object):
rpki.log.traceback()
cb(500, reason = "Could not process PDU: %s" % e)
+
+ def next_crl_number(self):
+ if self.crl_number is None:
+ try:
+ crl = rpki.x509.CRL(DER_file = self.rpki_root_dir + self.rpki_root_crl)
+ self.crl_number = crl.get_POWpkix().getExtension(rpki.oids.name2oid["cRLNumber"])[2]
+ except:
+ self.crl_number = 0
+ self.crl_number += 1
+ return self.crl_number
+
+
+ def next_serial_number(self):
+ if self.serial_number is None:
+ subject_cert = self.get_subject_cert()
+ if subject_cert is not None:
+ self.serial_number = subject_cert.getSerial() + 1
+ else:
+ self.serial_number = 0
+ self.serial_number += 1
+ return self.serial_number
+
+
def __init__(self):
global rootd
rootd = self # Gross, but simpler than what we'd have to do otherwise
+ self.rpki_root_cert = None
+ self.serial_number = None
+ self.crl_number = None
+ self.revoked = []
+
os.environ["TZ"] = "UTC"
time.tzset()
@@ -286,6 +327,8 @@ class main(object):
self.cfg = rpki.config.parser(self.cfg_file, "rootd")
+ rpki.log.enable_tracebacks = True
+
self.cfg.set_global_flags()
self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta"))
diff --git a/rpkid/rpki/rpkid.py b/rpkid/rpki/rpkid.py
index 715a8aa2..7501a16a 100644
--- a/rpkid/rpki/rpkid.py
+++ b/rpkid/rpki/rpkid.py
@@ -462,8 +462,8 @@ class ca_obj(rpki.sql.sql_persistent):
if rc_cert is None:
- rpki.log.warn("SKI %s in resource class %s is in my database but missing from list_response received from %s, maybe parent certificate went away?"
- % (ca_detail.public_key.gSKI(), rc.class_name, parent.parent_handle))
+ rpki.log.warn("SKI %s in resource class %s is in database but missing from list_response to %s from %s, maybe parent certificate went away?"
+ % (ca_detail.public_key.gSKI(), rc.class_name, parent.self.self_handle, parent.parent_handle))
publisher = publication_queue()
ca_detail.delete(ca = ca_detail.ca, publisher = publisher)
return publisher.call_pubd(iterator, eb)
@@ -495,8 +495,10 @@ class ca_obj(rpki.sql.sql_persistent):
def done():
if cert_map:
- rpki.log.warn("Certificate SKIs in resource class %s in list_response from parent %s that are missing from our database: %s"
- % (rc.class_name, parent.parent_handle, ", ".join(c.cert.gSKI() for c in cert_map.values())))
+ rpki.log.warn("Unknown certificate SKI%s %s in resource class %s in list_response to %s from %s, maybe you want to \"revoke_forgotten\"?"
+ % ("" if len(cert_map) == 1 else "s",
+ ", ".join(c.cert.gSKI() for c in cert_map.values()),
+ rc.class_name, parent.self.self_handle, parent.parent_handle))
self.gctx.checkpoint()
cb()
@@ -509,19 +511,19 @@ class ca_obj(rpki.sql.sql_persistent):
for x in ca_details
if x.latest_ca_cert is not None)
for ski in skis_parent & skis_me:
- rpki.log.debug("Parent %s and I agree that I have SKI %s in resource class %s"
- % (parent.parent_handle, ski, rc.class_name))
+ rpki.log.debug("Parent %s agrees that %s has SKI %s in resource class %s"
+ % (parent.parent_handle, parent.self.self_handle, ski, rc.class_name))
for ski in skis_parent - skis_me:
- rpki.log.debug("Parent %s thinks I have SKI %s in resource class %s but I don't think so"
- % (parent.parent_handle, ski, rc.class_name))
+ rpki.log.debug("Parent %s thinks %s has SKI %s in resource class %s but I don't think so"
+ % (parent.parent_handle, parent.self.self_handle, ski, rc.class_name))
for ski in skis_me - skis_parent:
- rpki.log.debug("I think I have SKI %s in resource class %s but parent %s doesn't think so"
- % (ski, rc.class_name, parent.parent_handle))
+ rpki.log.debug("I think %s has SKI %s in resource class %s but parent %s doesn't think so"
+ % (parent.self.self_handle, ski, rc.class_name, parent.parent_handle))
if ca_details:
rpki.async.iterator(ca_details, loop, done)
else:
- rpki.log.warn("Existing resource class %s from parent %s with no certificates, rekeying" % (rc.class_name, parent.parent_handle))
+ rpki.log.warn("Existing resource class %s to %s from %s with no certificates, rekeying" % (rc.class_name, parent.self.self_handle, parent.parent_handle))
self.gctx.checkpoint()
self.rekey(cb, eb)
@@ -625,9 +627,10 @@ class ca_obj(rpki.sql.sql_persistent):
rpki.up_down.issue_pdu.query(parent, self, new_detail, done, eb)
- def revoke(self, cb, eb):
+ def revoke(self, cb, eb, revoke_all = False):
"""
- Revoke deprecated ca_detail objects associated with this ca.
+ Revoke deprecated ca_detail objects associated with this CA, or
+ all ca_details associated with this CA if revoke_all is set.
"""
rpki.log.trace()
@@ -635,7 +638,9 @@ class ca_obj(rpki.sql.sql_persistent):
def loop(iterator, ca_detail):
ca_detail.revoke(cb = iterator, eb = eb)
- rpki.async.iterator(self.deprecated_ca_details, loop, cb)
+ ca_details = self.ca_details if revoke_all else self.deprecated_ca_details
+
+ rpki.async.iterator(ca_details, loop, cb)
def reissue(self, cb, eb):
"""
@@ -763,17 +768,23 @@ class ca_detail_obj(rpki.sql.sql_persistent):
self.state = "active"
self.generate_crl(publisher = publisher)
self.generate_manifest(publisher = publisher)
- self.sql_mark_dirty()
+ self.sql_store()
if predecessor is not None:
predecessor.state = "deprecated"
- predecessor.sql_mark_dirty()
+ predecessor.sql_store()
for child_cert in predecessor.child_certs:
child_cert.reissue(ca_detail = self, publisher = publisher)
for roa in predecessor.roas:
roa.regenerate(publisher = publisher)
# Need to do something to regenerate ghostbusters here?
+ # Yes, I suspect so, since presumably we want the ghostbuster to
+ # be issued by the new ca_detail at this point. But check code.
+
+ if predecessor.ghostbusters:
+ rpki.log.warn("Probably should be regenerating Ghostbusters %r here" % ghostbuster)
+
publisher.call_pubd(callback, errback)
@@ -842,6 +853,8 @@ class ca_detail_obj(rpki.sql.sql_persistent):
if r_msg.payload.ski != self.latest_ca_cert.gSKI():
raise rpki.exceptions.SKIMismatch
+ rpki.log.debug("Parent revoked %s, starting cleanup" % self.latest_ca_cert.gSKI())
+
crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval)
nextUpdate = rpki.sundial.now()
@@ -881,6 +894,7 @@ class ca_detail_obj(rpki.sql.sql_persistent):
self.sql_mark_dirty()
publisher.call_pubd(cb, eb)
+ rpki.log.debug("Asking parent to revoke CA certificate %s" % self.latest_ca_cert.gSKI())
rpki.up_down.revoke_pdu.query(ca, self.latest_ca_cert.gSKI(), parent_revoked, eb)
def update(self, parent, ca, rc, sia_uri_changed, old_resources, callback, errback):
@@ -963,8 +977,7 @@ class ca_detail_obj(rpki.sql.sql_persistent):
containing the newly issued cert.
"""
- assert child_cert is None or (child_cert.child_id == child.child_id and
- child_cert.ca_detail_id == self.ca_detail_id)
+ assert child_cert is None or child_cert.child_id == child.child_id
cert = self.latest_ca_cert.issue(
keypair = self.private_key_id,
@@ -985,6 +998,7 @@ class ca_detail_obj(rpki.sql.sql_persistent):
rpki.log.debug("Created new child_cert %r" % child_cert)
else:
child_cert.cert = cert
+ child_cert.ca_detail_id = self.ca_detail_id
rpki.log.debug("Reusing existing child_cert %r" % child_cert)
child_cert.ski = cert.get_SKI()
@@ -1155,7 +1169,7 @@ class child_cert_obj(rpki.sql.sql_persistent):
"""
return self.ca_detail.ca.sia_uri + self.uri_tail
- def revoke(self, publisher, generate_crl_and_manifest = False):
+ def revoke(self, publisher, generate_crl_and_manifest = True):
"""
Revoke a child cert.
"""
@@ -1207,7 +1221,7 @@ class child_cert_obj(rpki.sql.sql_persistent):
needed = True
if ca_detail != old_ca_detail:
- rpki.log.debug("Issuer changed for %r" % self)
+ rpki.log.debug("Issuer changed for %r %s" % (self, self.uri))
needed = True
must_revoke = old_resources.oversized(resources) or old_resources.valid_until > resources.valid_until
@@ -1215,11 +1229,6 @@ class child_cert_obj(rpki.sql.sql_persistent):
rpki.log.debug("Must revoke any existing cert(s) for %r" % self)
needed = True
- new_issuer = ca_detail != old_ca_detail
- if new_issuer:
- rpki.log.debug("Issuer changed for %r" % self)
- needed = True
-
if resources.valid_until != old_resources.valid_until:
rpki.log.debug("Validity changed for %r: %s %s" % (self, old_resources.valid_until, resources.valid_until))
needed = True
@@ -1245,7 +1254,7 @@ class child_cert_obj(rpki.sql.sql_persistent):
subject_key = self.cert.getPublicKey(),
sia = sia,
resources = resources,
- child_cert = None if must_revoke or new_issuer else self,
+ child_cert = None if must_revoke else self,
publisher = publisher)
rpki.log.debug("New child_cert %r uri %s" % (child_cert, child_cert.uri))
@@ -1496,10 +1505,12 @@ class roa_obj(rpki.sql.sql_persistent):
ca_detail = self.ca_detail
if ca_detail is None or ca_detail.state != "active" or ca_detail.has_expired():
+ rpki.log.debug("Searching for new ca_detail for ROA %r" % self)
ca_detail = None
for parent in self.self.parents:
for ca in parent.cas:
ca_detail = ca.active_ca_detail
+ assert ca_detail is None or ca_detail.state == "active"
if ca_detail is not None and not ca_detail.has_expired():
resources = ca_detail.latest_ca_cert.get_3779resources()
if v4.issubset(resources.v4) and v6.issubset(resources.v6):
@@ -1507,10 +1518,15 @@ class roa_obj(rpki.sql.sql_persistent):
ca_detail = None
if ca_detail is not None:
break
+ else:
+ rpki.log.debug("Keeping old ca_detail for ROA %r" % self)
if ca_detail is None:
raise rpki.exceptions.NoCoveringCertForROA, "Could not find a certificate covering %r" % self
+ rpki.log.debug("Using new ca_detail %r for ROA %r, ca_detail_state %s" % (
+ ca_detail, self, ca_detail.state))
+
ca = ca_detail.ca
resources = rpki.resource_set.resource_bag(v4 = v4, v6 = v6)
keypair = rpki.x509.RSA.generate()
@@ -1530,6 +1546,7 @@ class roa_obj(rpki.sql.sql_persistent):
if not fast:
ca_detail.generate_manifest(publisher = publisher)
+
def published_callback(self, pdu):
"""
Check publication result.
@@ -1559,8 +1576,8 @@ class roa_obj(rpki.sql.sql_persistent):
roa = self.roa
uri = self.uri
- if ca_detail.state != 'active':
- self.ca_detail_id = None
+ rpki.log.debug("Regenerating ROA %r, ca_detail %r state is %s" % (
+ self, ca_detail, ca_detail.state))
if regenerate:
self.generate(publisher = publisher, fast = fast)
@@ -1569,7 +1586,10 @@ class roa_obj(rpki.sql.sql_persistent):
rpki.rpkid.revoked_cert_obj.revoke(cert = cert, ca_detail = ca_detail)
publisher.withdraw(cls = rpki.publication.roa_elt, uri = uri, obj = roa, repository = ca_detail.ca.parent.repository,
handler = False if allow_failure else None)
- self.sql_mark_deleted()
+
+ if not regenerate:
+ self.sql_mark_deleted()
+
if not fast:
ca_detail.generate_crl(publisher = publisher)
ca_detail.generate_manifest(publisher = publisher)
diff --git a/rpkid/tests/revoke.yaml b/rpkid/tests/revoke.yaml
new file mode 100644
index 00000000..c006460d
--- /dev/null
+++ b/rpkid/tests/revoke.yaml
@@ -0,0 +1,198 @@
+# $Id: smoketest.1.yaml 3881 2011-06-17 18:32:54Z sra $
+
+# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+#
+# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+name: RIR
+crl_interval: 5m
+regen_margin: 2m
+valid_for: 2d
+kids:
+ - name: R0
+ kids:
+ - name: Alice
+ ipv4: 192.0.2.1-192.0.2.33
+ asn: 64533
+ roa_request:
+ - asn: 42
+ ipv4: 192.0.2.32/32
+ - name: Bob
+ ipv4: 192.0.2.44-192.0.2.100
+ ipv4: 10.3.0.0/16
+ roa_request:
+ - asn: 666
+ ipv4: 10.3.0.44/32
+
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ rekey:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ revoke:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- sleep 30
+
+
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ rekey:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ revoke:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- sleep 30
+
+
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ rekey:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ revoke:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- sleep 30
+
+
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ rekey:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ revoke:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- sleep 30
+
+
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ rekey:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ revoke:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- sleep 30
+
+
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ rekey:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ revoke:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- sleep 30
+
+
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ rekey:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ revoke:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- sleep 30
+
+
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ rekey:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ revoke:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- sleep 30
+
+
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ rekey:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ revoke:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- sleep 30
+
+
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ rekey:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- name: R0
+ revoke:
+- sleep 10
+---
+- shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; cd rcynic-data; pax -rwl . ../$dir; find . -type f -name '*.cer' | sort | xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1
+- sleep 30
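
The revoke.yaml hunk above is deliberately repetitive: each cycle snapshots the rcynic output tree (hard-linked via pax into a timestamped directory, plus a sorted list of certificate URIs), rekeys R0, waits, snapshots again, revokes R0, waits, snapshots once more, then settles for 30 seconds. The following generator is purely hypothetical and not part of the repository; it is shown only to make that cycle structure explicit, with the snapshot command string copied verbatim from the test file.

    # Hypothetical generator for the repeated revoke.yaml stanzas above;
    # not part of the repository, shown only to illustrate the cycle.

    snapshot = ("shell sleep 1; dir=rcynic.`date +%s`.data; mkdir $dir; "
                "cd rcynic-data; pax -rwl . ../$dir; "
                "find . -type f -name '*.cer' | sort | "
                "xargs ../../../../utils/uri/uri -s >../${dir%.data}.uris; sleep 1")

    def cycle():
      # One rekey/revoke cycle against CA R0: snapshot, rekey, snapshot,
      # revoke, snapshot, then a longer settle period.
      return ["- " + snapshot, "- name: R0\n  rekey:", "- sleep 10", "---",
              "- " + snapshot, "- name: R0\n  revoke:", "- sleep 10", "---",
              "- " + snapshot, "- sleep 30", "", "", "---"]

    print("\n".join(line for _ in range(2) for line in cycle()))
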
diff --git a/rpkid/tests/smoketest.py b/rpkid/tests/smoketest.py
index fa686afd..3cb90d11 100644
--- a/rpkid/tests/smoketest.py
+++ b/rpkid/tests/smoketest.py
@@ -1264,6 +1264,7 @@ def run_rcynic():
subprocess.check_call((prog_rcynic, "-c", rcynic_name + ".conf"), env = env)
subprocess.call(rcynic_stats, shell = True, env = env)
last_rcynic_run = int(time.time())
+ os.link("%s.xml" % rcynic_name, "%s.%s.xml" % (rcynic_name, last_rcynic_run))
def mangle_sql(filename):
"""
diff --git a/rtr-origin/rtr-origin.py b/rtr-origin/rtr-origin.py
index 3b6ec145..9064e77a 100755
--- a/rtr-origin/rtr-origin.py
+++ b/rtr-origin/rtr-origin.py
@@ -1060,11 +1060,12 @@ class pdu_channel(asynchat.async_chat):
"""
Handle errors caught by asyncore main loop.
"""
- if backtrace_on_exceptions:
+ c, e = sys.exc_info()[:2]
+ if backtrace_on_exceptions or e == 0:
for line in traceback.format_exc().splitlines():
log(line)
else:
- log("[Exception: %s]" % sys.exc_info()[1])
+      log("[Exception: %s: %s]" % (c.__name__, e))
log("[Exiting after unhandled exception]")
sys.exit(1)
@@ -1356,11 +1357,12 @@ class kickme_channel(asyncore.dispatcher):
"""
Handle errors caught by asyncore main loop.
"""
- if backtrace_on_exceptions:
+ c, e = sys.exc_info()[:2]
+ if backtrace_on_exceptions or e == 0:
for line in traceback.format_exc().splitlines():
log(line)
else:
- log("[Exception: %s]" % sys.exc_info()[1])
+      log("[Exception: %s: %s]" % (c.__name__, e))
log("[Exiting after unhandled exception]")
sys.exit(1)
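
Both handle_error() changes above report the exception class name as well as its value, since str() of some exceptions (notably SystemExit) is empty or unhelpful on its own. A standalone sketch of that pattern, with the helper names chosen here only for illustration:

    import sys
    import traceback

    def log(msg):
      sys.stderr.write(msg + "\n")      # stand-in for rtr-origin's log()

    def report_unhandled(backtrace_on_exceptions = False):
      # Log either a full traceback or a one-line "ClassName: value" summary.
      cls, val = sys.exc_info()[:2]
      if backtrace_on_exceptions:
        for line in traceback.format_exc().splitlines():
          log(line)
      else:
        log("[Exception: %s: %s]" % (cls.__name__, val))

    try:
      raise ValueError("bad PDU")
    except ValueError:
      report_unhandled()
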
diff --git a/scripts/analyze-rcynic-history.py b/scripts/analyze-rcynic-history.py
index 7d918198..f45a0578 100644
--- a/scripts/analyze-rcynic-history.py
+++ b/scripts/analyze-rcynic-history.py
@@ -19,12 +19,18 @@ OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
"""
-plot_all_hosts = False
-plot_to_one = True
-plot_to_many = True
-write_rcynic_xml = True
-import mailbox, sys, urlparse, os, getopt, datetime, subprocess
+plot_all_hosts = False
+window_hours = 72
+
+import mailbox
+import sys
+import urlparse
+import os
+import getopt
+import datetime
+import subprocess
+import shelve
from xml.etree.cElementTree import (ElementTree as ElementTree,
fromstring as ElementTreeFromString)
@@ -73,12 +79,14 @@ class Host(object):
del self.uris
@property
- def failure_rate_percentage(self):
- return float(self.dead_connections * 100) / float(self.connection_count)
+ def failed(self):
+ return 1 if self.dead_connections else 0
@property
def seconds_per_object(self):
- return float(self.elapsed.total_seconds()) / float(self.object_count)
+ return float(self.elapsed.days * 24 * 60 * 60 +
+ self.elapsed.seconds +
+ self.elapsed.microseconds / 10**6) / float(self.object_count)
@property
def objects_per_connection(self):
@@ -86,16 +94,19 @@ class Host(object):
@property
def average_connection_time(self):
- return float(self.total_connection_time.total_seconds()) / float(self.connection_count)
+ return float(self.total_connection_time.days * 24 * 60 * 60 +
+ self.total_connection_time.seconds +
+ self.total_connection_time.microseconds / 10**6) / float(self.connection_count)
class Format(object):
- def __init__(self, attr, title, fmt):
+ def __init__(self, attr, title, fmt, ylabel = ""):
self.attr = attr
self.title = title
self.width = len(title) - int("%" in fmt)
self.fmt = "%%%d%s" % (self.width, fmt)
self.oops = "*" * self.width
+ self.ylabel = ylabel
def __call__(self, obj):
try:
@@ -103,21 +114,14 @@ class Host(object):
except ZeroDivisionError:
return self.oops
- format = (Format("connection_count", "Connections", "d"),
- Format("object_count", "Objects", "d"),
- Format("objects_per_connection", "Objects/Connection", ".3f"),
- Format("seconds_per_object", "Seconds/Object", ".3f"),
- Format("failure_rate_percentage", "Failure Rate", ".3f%%"),
- Format("average_connection_time", "Average Connection", ".3f"),
+ format = (Format("connection_count", "Connections", "d", "Connections To Repository (Per Session)"),
+ Format("object_count", "Objects", "d", "Objects In Repository (Distinct URIs Per Session)"),
+ Format("objects_per_connection", "Objects/Connection", ".3f", "Objects In Repository / Connections To Repository"),
+ Format("seconds_per_object", "Seconds/Object", ".3f", "Seconds To Transfer / Object (Average Per Session)"),
+ Format("failure_rate_running", "Failure Rate", ".3f%%", "Sessions With Failed Connections Within Last %d Hours" % window_hours),
+ Format("average_connection_time", "Average Connection", ".3f", "Seconds / Connection (Average Per Session)"),
Format("hostname", "Hostname", "s"))
- separator = " " * 2
-
- header = separator.join(fmt.title for fmt in format)
-
- def __str__(self):
- return self.separator.join(fmt(self) for fmt in self.format)
-
format_dict = dict((fmt.attr, fmt) for fmt in format)
def format_field(self, name):
@@ -132,6 +136,8 @@ class Session(dict):
def __init__(self, session_id, msg_key):
self.session_id = session_id
self.msg_key = msg_key
+ self.date = parse_utc(session_id)
+ self.calculated_failure_history = False
@property
def hostnames(self):
@@ -154,9 +160,22 @@ class Session(dict):
for h in self.itervalues():
h.finalize()
+ def calculate_failure_history(self, sessions):
+ start = self.date - datetime.timedelta(hours = window_hours)
+ sessions = tuple(s for s in sessions if s.date <= self.date and s.date > start)
+ for hostname, h in self.iteritems():
+ i = n = 0
+ for s in sessions:
+ if hostname in s:
+ i += s[hostname].failed
+ n += 1
+ h.failure_rate_running = float(100 * i) / n
+ self.calculated_failure_history = True
+
def plotter(f, hostnames, field, logscale = False):
plotlines = sorted(session.get_plot_row(field, hostnames) for session in sessions)
title = Host.format_dict[field].title
+ ylabel = Host.format_dict[field].ylabel
n = len(hostnames) + 1
assert all(n == len(plotline) for plotline in plotlines)
if "%%" in Host.format_dict[field].fmt:
@@ -171,15 +190,18 @@ def plotter(f, hostnames, field, logscale = False):
set xdata time
set timefmt '%Y-%m-%dT%H:%M:%SZ'
#set format x '%m/%d'
- set format x '%b%d'
+ #set format x '%b%d'
+ #set format x '%Y-%m-%d'
+ set format x '%Y-%m'
#set title '""" + title + """'
+ set ylabel '""" + ylabel + """'
plot""" + ",".join(" '-' using 1:2 with linespoints pointinterval 500 title '%s'" % h for h in hostnames) + "\n")
for i in xrange(1, n):
for plotline in plotlines:
f.write("%s %s\n" % (plotline[0], plotline[i].rstrip("%")))
f.write("e\n")
-def plot_many(hostnames, fields):
+def plot_hosts(hostnames, fields):
for field in fields:
for logscale in (False, True):
gnuplot = subprocess.Popen(("gnuplot",), stdin = subprocess.PIPE)
@@ -189,70 +211,67 @@ def plot_many(hostnames, fields):
gnuplot.stdin.close()
gnuplot.wait()
-def plot_one(hostnames, fields):
- gnuplot = subprocess.Popen(("gnuplot",), stdin = subprocess.PIPE)
- gnuplot.stdin.write("set terminal pdf\n")
- gnuplot.stdin.write("set output 'analyze-rcynic-history.pdf'\n")
- for field in fields:
- if field != "hostname":
- plotter(gnuplot.stdin, hostnames, field, logscale = False)
- plotter(gnuplot.stdin, hostnames, field, logscale = True)
- gnuplot.stdin.close()
- gnuplot.wait()
-
mb = mailbox.Maildir("/u/sra/rpki/rcynic-xml", factory = None, create = False)
+if sys.platform == "darwin": # Sigh
+ shelf = shelve.open("rcynic-xml", "c")
+else:
+ shelf = shelve.open("rcynic-xml.db", "c")
+
sessions = []
latest = None
for i, key in enumerate(mb.iterkeys(), 1):
+ sys.stderr.write("\r%s Reading %d/%d..." % ("|\\-/"[i & 3], i, len(mb)))
- sys.stderr.write("\r%s %d/%d..." % ("|\\-/"[i & 3], i, len(mb)))
+ if key in shelf:
+ session = shelf[key]
- assert not mb[key].is_multipart()
-
- input = ElementTreeFromString(mb[key].get_payload())
-
- date = input.get("date")
-
- sys.stderr.write("%s..." % date)
+ else:
+ assert not mb[key].is_multipart()
+ input = ElementTreeFromString(mb[key].get_payload())
+ date = input.get("date")
+ sys.stderr.write("%s..." % date)
+ session = Session(date, key)
+ for elt in input.findall("rsync_history"):
+ session.add_rsync_history(Rsync_History(elt))
+ for elt in input.findall("validation_status"):
+ if elt.get("generation") == "current":
+ session.add_uri(elt.text.strip())
+ session.finalize()
+ shelf[key] = session
- session = Session(date, key)
sessions.append(session)
-
if latest is None or session.session_id > latest.session_id:
latest = session
- for elt in input.findall("rsync_history"):
- session.add_rsync_history(Rsync_History(elt))
-
- for elt in input.findall("validation_status"):
- if elt.get("generation") == "current":
- session.add_uri(elt.text.strip())
+sys.stderr.write("\n")
- session.finalize()
+shelf.sync()
+for i, session in enumerate(sessions, 1):
+ sys.stderr.write("\r%s Failure history %d/%d...%s..." % ("|\\-/"[i & 3], i, len(sessions), session.session_id))
+ if not getattr(session, "calculated_failure_history", False):
+ session.calculate_failure_history(sessions)
+ shelf[session.msg_key] = session
sys.stderr.write("\n")
if plot_all_hosts:
- hostnames = set()
- for session in sessions:
- hostnames.update(session.hostnames)
- hostnames = sorted(hostnames)
+ hostnames = sorted(reduce(lambda x, y: x | y,
+ (s.hostnames for s in sessions),
+ set()))
else:
hostnames = ("rpki.apnic.net", "rpki.ripe.net", "repository.lacnic.net",
- "rpki.afrinic.net", "arin.rpki.net", "rgnet.rpki.net",
- "rpki-pilot.arin.net")
+ "rpki.afrinic.net", "rpki-pilot.arin.net",
+ "arin.rpki.net", "rgnet.rpki.net")
-fields = [fmt.attr for fmt in Host.format if fmt.attr != "hostname"]
-if plot_to_one:
- plot_one(hostnames, fields)
-if plot_to_many:
- plot_many(hostnames, fields)
+plot_hosts(hostnames, [fmt.attr for fmt in Host.format if fmt.attr != "hostname"])
-if write_rcynic_xml and latest is not None:
+if latest is not None:
f = open("rcynic.xml", "wb")
f.write(mb[latest.msg_key].get_payload())
f.close()
+
+shelf.close()
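
The new failure_rate_running metric replaces the per-session failure percentage: for each session, a host's rate is the percentage of sessions within the preceding window_hours (72 here) in which at least one connection to that host failed. A simplified sketch of that calculation, using plain tuples and dicts instead of the script's Session and Host classes:

    import datetime

    window_hours = 72

    def failure_rate_running(hostname, this_date, sessions):
      # sessions: iterable of (date, {hostname: failed_flag}) pairs, where
      # failed_flag is 1 if that session had any dead connection to the host.
      start = this_date - datetime.timedelta(hours = window_hours)
      failed = total = 0
      for date, hosts in sessions:
        if start < date <= this_date and hostname in hosts:
          failed += hosts[hostname]
          total += 1
      return float(100 * failed) / total if total else 0.0

The shelve cache added above serves a similar bookkeeping purpose: each parsed Session is stored under its Maildir message key, so later runs only parse messages they have not already seen.
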
diff --git a/scripts/roa-to-irr.py b/scripts/roa-to-irr.py
index 09526328..d02e1e3a 100644
--- a/scripts/roa-to-irr.py
+++ b/scripts/roa-to-irr.py
@@ -23,7 +23,7 @@ right.
$Id$
-Copyright (C) 2010 Internet Systems Consortium ("ISC")
+Copyright (C) 2010-2012 Internet Systems Consortium ("ISC")
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
@@ -38,7 +38,14 @@ OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
"""
-import os, socket, sys, getopt, errno, time, rpki.x509, rpki.ipaddrs
+import os
+import socket
+import sys
+import getopt
+import errno
+import time
+import rpki.x509
+import rpki.ipaddrs
class route_virtual(object):
"""
@@ -71,14 +78,17 @@ class route_virtual(object):
return result
def __str__(self):
- return "".join((
- ("%-14s%s/%s\n" % (self.label, self.prefix, self.prefixlen)),
- ("descr: %s/%s-%s\n" % (self.prefix, self.prefixlen, self.max_prefixlen)),
- ("origin: AS%d\n" % self.asn),
- ("notify: %s\n" % irr_notify),
- ("mnt-by: %s\n" % irr_mnt_by),
- ("changed: %s %s\n" % (irr_changed_by, self.date)),
- ("source: %s\n" % irr_source)))
+ lines = (
+ "%-14s%s/%s" % (self.label, self.prefix, self.prefixlen),
+ "descr: %s/%s-%s" % (self.prefix, self.prefixlen, self.max_prefixlen),
+ "origin: AS%d" % self.asn,
+ "notify: %s" % irr_notify,
+ "mnt-by: %s" % irr_mnt_by,
+ "changed: %s %s" % (irr_changed_by, self.date),
+ "source: %s" % irr_source,
+ "override: %s" % password if password is not None else None,
+ "")
+ return "\n".join(line for line in lines if line is not None)
def write(self, output_directory):
name = "%s-%s-%s-AS%d-%s" % (self.prefix, self.prefixlen, self.max_prefixlen, self.asn, self.date)
@@ -136,8 +146,10 @@ irr_source = "RPKI"
irr_from = whoami
output = None
email = False
+password = None
-options = ["changed_by=", "email", "from=", "help", "mnt_by=", "notify=", "output=", "source="]
+options = ["changed_by=", "email", "from=", "help", "mnt_by=",
+ "notify=", "output=", "password=", "source="]
def usage(code = 1):
f = sys.stderr if code else sys.stdout
@@ -147,11 +159,10 @@ def usage(code = 1):
f.write(__doc__)
sys.exit(code)
-opts, argv = getopt.getopt(sys.argv[1:], "c:ef:hm:n:o:s:?", options)
+opts, argv = getopt.getopt(sys.argv[1:], "c:ef:hm:n:o:p:s:?", options)
for o, a in opts:
if o in ("-h", "--help", "-?"):
- print __doc__
- sys.exit(0)
+ usage(0)
elif o in ("-c", "--changed_by"):
irr_changed_by = a
elif o in ("-e", "--email"):
@@ -164,6 +175,8 @@ for o, a in opts:
irr_notify = a
elif o in ("-o", "--output"):
output = a
+ elif o in ("-p", "--password"):
+ password = a
elif o in ("-s", "--source"):
source = a
else:
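
Rewriting route_virtual.__str__() as a join over a tuple of lines makes the new optional override: attribute easy to handle: the password line is present only when a password was supplied, because None entries are filtered out before joining. A minimal standalone sketch of that pattern, with the field values and padding invented for the example:

    def rpsl_route(prefix, prefixlen, max_prefixlen, asn, notify, mnt_by,
                   changed, source, password = None):
      # Build an RPSL-style route object; optional attributes evaluate to
      # None and are dropped by the final join, so no string surgery needed.
      lines = (
        "%-14s%s/%s" % ("route:", prefix, prefixlen),
        "descr:        %s/%s-%s" % (prefix, prefixlen, max_prefixlen),
        "origin:       AS%d" % asn,
        "notify:       %s" % notify,
        "mnt-by:       %s" % mnt_by,
        "changed:      %s" % changed,
        "source:       %s" % source,
        "override:     %s" % password if password is not None else None,
        "")
      return "\n".join(line for line in lines if line is not None)

    print(rpsl_route("192.0.2.0", 24, 24, 64496, "noc@example.net",
                     "MAINT-EXAMPLE", "noc@example.net 20120319", "RPKI"))
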
diff --git a/scripts/testbed-rootcert.py b/scripts/testbed-rootcert.py
index e4cf304f..2227f110 100644
--- a/scripts/testbed-rootcert.py
+++ b/scripts/testbed-rootcert.py
@@ -42,7 +42,7 @@ CN = Pseudo-%(HOLDER)s testbed root RPKI certificat
basicConstraints = critical,CA:true
subjectKeyIdentifier = hash
keyUsage = critical,keyCertSign,cRLSign
-subjectInfoAccess = 1.3.6.1.5.5.7.48.5;URI:rsync://%(holder)s.rpki.net/rpki/%(holder)s/,1.3.6.1.5.5.7.48.10;URI:rsync://%(holder)s.rpki.net/rpki/%(holder)s/root.mnf
+subjectInfoAccess = 1.3.6.1.5.5.7.48.5;URI:rsync://%(holder)s.rpki.net/rpki/%(holder)s/,1.3.6.1.5.5.7.48.10;URI:rsync://%(holder)s.rpki.net/rpki/%(holder)s/root.mft
certificatePolicies = critical,1.3.6.1.5.5.7.14.2
sbgp-autonomousSysNum = critical,@rfc3779_asns
sbgp-ipAddrBlock = critical,@rfc3997_addrs