-rw-r--r--  Makefile.in                                          16
-rwxr-xr-x  ca/irbe_cli                                           5
-rwxr-xr-x  ca/rpki-sql-setup                                    18
-rwxr-xr-x  ca/rpkigui-query-routes                              16
-rw-r--r--  ca/tests/sql-cleaner.py                              13
-rw-r--r--  rpki/config.py                                        6
-rw-r--r--  rpki/exceptions.py                                  282
-rw-r--r--  rpki/fields.py                                       12
-rw-r--r--  rpki/http.py                                          9
-rw-r--r--  rpki/irdb/migrations/0004_auto_20151018_1603.py      40
-rw-r--r--  rpki/irdb/models.py                                   6
-rw-r--r--  rpki/irdb/zookeeper.py                               62
-rw-r--r--  rpki/left_right.py                                 1120
-rw-r--r--  rpki/log.py                                          45
-rw-r--r--  rpki/rpkid.py                                      2321
-rw-r--r--  rpki/rpkid_tasks.py                                 417
-rw-r--r--  rpki/rpkidb/migrations/0003_auto_20151018_1600.py    30
-rw-r--r--  rpki/rpkidb/migrations/0004_auto_20151018_1602.py    30
-rw-r--r--  rpki/rpkidb/migrations/0005_auto_20151018_1613.py    37
-rw-r--r--  rpki/rpkidb/models.py                               322
-rw-r--r--  rpki/sql.py                                         472
-rw-r--r--  rpki/sql_schemas.py                                 335
-rw-r--r--  rpki/x509.py                                         37
-rw-r--r--  schemas/sql/pubd.sql                                 85
-rw-r--r--  schemas/sql/rpkid.sql                               240
25 files changed, 833 insertions, 5143 deletions
diff --git a/Makefile.in b/Makefile.in
index faba9054..9d53cf7a 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -54,9 +54,6 @@ RNGS = schemas/relaxng/left-right.rng \
schemas/relaxng/rrdp.rng \
schemas/relaxng/oob-setup.rng
-SQLS = schemas/sql/rpkid.sql \
- schemas/sql/pubd.sql
-
default: all
all: VERSION rpki/autoconf.py setup_autoconf.py
@@ -106,7 +103,6 @@ rpki-all: \
${abs_top_srcdir}/h/rpki/sk_manifest.h \
${abs_top_srcdir}/h/rpki/sk_roa.h \
${abs_top_srcdir}/rpki/relaxng.py \
- ${abs_top_srcdir}/rpki/sql_schemas.py \
${POW_SO} \
build/stamp
@@ -186,10 +182,6 @@ ${abs_top_srcdir}/rpki/relaxng.py: buildtools/make-relaxng.py ${RNGS}
cd schemas/relaxng; ${PYTHON} ${abs_top_srcdir}/buildtools/make-relaxng.py *.rng >$@.tmp
mv $@.tmp $@
-${abs_top_srcdir}/rpki/sql_schemas.py: buildtools/make-sql-schemas.py ${SQLS}
- cd schemas/sql; ${PYTHON} ${abs_top_srcdir}/buildtools/make-sql-schemas.py >$@.tmp
- mv $@.tmp $@
-
schemas/relaxng/left-right.rng: schemas/relaxng/left-right.rnc
${TRANG} schemas/relaxng/left-right.rnc schemas/relaxng/left-right.rng
@@ -217,14 +209,10 @@ schemas/relaxng/oob-setup.rng: schemas/relaxng/oob-setup.rnc
# Eg: PYLINT_FLAGS='--disable=W0311'
lint: .FORCE
- pylint --rcfile ${abs_top_srcdir}/buildtools/pylint.rc ${PYLINT_FLAGS} \
- rpki `find rp ca -type f -perm -1 -print | xargs grep -El '^#!.+python'`
+ pylint --rcfile ${abs_top_srcdir}/buildtools/pylint.rc ${PYLINT_FLAGS} rpki `find rp ca -type f -perm -1 ! -name '*~' -print | xargs grep -El '^#!.+python'`
tags: Makefile .FORCE
- find rpki rp ca schemas -type f \
- \( -name '*.[ch]' -o -name '*.py' -o -name '*.sql' -o -name '*.rnc' \) \
- ! -name relaxng.py ! -name sql_schemas.py -print | \
- etags -
+ { find rpki rp ca schemas -type f \( -name '*.[ch]' -o -name '*.py' -o -name '*.sql' -o -name '*.rnc' \) ! -name relaxng.py -print; find rp ca -type f -perm -1 ! -name '*~' -print | xargs grep -El '^#!.+python'; } | etags -
# This isn't all that useful until SQL has been set up. Might want to
# hack up something using ca/rpki-confgen and ca/rpki-sql-setup.
diff --git a/ca/irbe_cli b/ca/irbe_cli
index 2edde024..ebd5d2a5 100755
--- a/ca/irbe_cli
+++ b/ca/irbe_cli
@@ -37,6 +37,7 @@ Command line IR back-end control program for rpkid and pubd.
# Command line processing of this program is too complex and
# idiosyncratic to be worth trying to reimplement using argparse.
+import os
import sys
import getopt
import textwrap
@@ -206,7 +207,7 @@ class left_right_msg(cmd_msg_mixin, rpki.left_right.msg):
for x in (self_elt, bsc_elt, parent_elt, child_elt, repository_elt,
list_published_objects_elt, list_received_resources_elt, report_error_elt))
-class left_right_sax_handler(rpki.left_right.sax_handler):
+class left_right_sax_handler(rpki.left_right.sax_handler): # pylint: disable=W0232
pdu = left_right_msg
class left_right_cms_msg(rpki.left_right.cms_msg):
@@ -250,7 +251,7 @@ class publication_msg(cmd_msg_mixin, rpki.publication.msg):
manifest_elt, roa_elt, report_error_elt,
ghostbuster_elt))
-class publication_sax_handler(rpki.publication.sax_handler):
+class publication_sax_handler(rpki.publication.sax_handler): # pylint: disable=W0232
pdu = publication_msg
class publication_cms_msg(rpki.publication.cms_msg):
diff --git a/ca/rpki-sql-setup b/ca/rpki-sql-setup
index 848e3d0f..b209fec7 100755
--- a/ca/rpki-sql-setup
+++ b/ca/rpki-sql-setup
@@ -27,7 +27,11 @@ import datetime
import rpki.config
import rpki.version
import rpki.autoconf
-import rpki.sql_schemas
+
+# This program implements its own schema versioning system as a poor
+# substitute for schema migrations. Now that we're moving to Django
+# ORM, this is pretty much useless, and should be removed at some point.
+
from rpki.mysql_import import MySQLdb, _mysql_exceptions
@@ -142,14 +146,6 @@ class UserDB(object):
self.db.commit()
log("Updated %s to %s" % (self.name, v))
- @property
- def schema(self):
- lines = []
- for line in getattr(rpki.sql_schemas, self.name, "").splitlines():
- line = " ".join(line.split())
- if line and not line.startswith("--"):
- lines.append(line)
- return [statement.strip() for statement in " ".join(lines).rstrip(";").split(";") if statement.strip()]
class Version(object):
@@ -215,10 +211,6 @@ def do_create(name):
(db.password,))
root.db.commit()
db.open()
- for statement in db.schema:
- if not statement.upper().startswith("DROP TABLE"):
- log(statement)
- db.cur.execute(statement)
db.version = current_version
db.close()
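The comment added above points at the replacement mechanism: once the daemons' state lives in Django ORM models, schema changes travel as migration modules (like the rpki/irdb and rpki/rpkidb migrations added elsewhere in this commit) rather than a hand-rolled version table. A minimal sketch of applying those migrations programmatically, assuming a settings module along the lines of rpki.django_settings (the settings name is an assumption, not something this commit defines):

    # Sketch only: the settings module name is assumed, not defined by this commit.
    import os
    import django
    from django.core.management import call_command

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rpki.django_settings")  # assumed
    django.setup()
    call_command("migrate", "irdb")   # applies rpki/irdb/migrations/000*.py in order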
diff --git a/ca/rpkigui-query-routes b/ca/rpkigui-query-routes
index 179f8c2c..dc2835a0 100755
--- a/ca/rpkigui-query-routes
+++ b/ca/rpkigui-query-routes
@@ -49,17 +49,17 @@ qs = rv.RouteOrigin.objects.filter(
prefix_max__gte=r.max
)
-def validity_marker(route, roa, roa_prefix):
- "Return + if the roa would cause the route to be accepted, or - if not"
- # we already know the ROA covers this route because they are returned
- # from RouteOrigin.roas, so just check the ASN and max prefix length
- return '-' if (roa.asid == 0 or route.asn != roa.asid or
- route.prefixlen > roa_prefix.max_length) else '+'
-
# xxx.xxx.xxx.xxx/xx-xx is 22 characters
+# we already know the ROA covers this route because they are returned
+# from RouteOrigin.roas, so just check the ASN and max prefix length
+
for route in qs:
print route.as_resource_range(), route.asn, route.status
for pfx in route.roa_prefixes:
for roa in pfx.roas.all():
- print validity_marker(route, roa, pfx), pfx.as_roa_prefix(), roa.asid, roa.repo.uri
+ if roa.asid == 0 or route.asn != roa.asid or route.prefixlen > pfx.max_length:
+ validity_marker = '-'
+ else:
+ validity_marker = '+'
+ print validity_marker, pfx.as_roa_prefix(), roa.asid, roa.repo.uri
print
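The inlined check above encodes the usual ROA origin-validation rule for a covering prefix: accept only if the ROA's ASN is non-zero, equals the route's origin ASN, and the route's prefix length does not exceed the ROA prefix's maxLength. A standalone sketch of that predicate (the function and argument names are illustrative, not part of this script):

    def roa_accepts(route_asn, route_prefixlen, roa_asid, roa_max_length):
        # AS0 ROAs never authorize a route; otherwise the origin AS must match
        # and the announced prefix must not be longer than maxLength.
        return (roa_asid != 0 and
                route_asn == roa_asid and
                route_prefixlen <= roa_max_length)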
diff --git a/ca/tests/sql-cleaner.py b/ca/tests/sql-cleaner.py
index 369a68ea..828100a4 100644
--- a/ca/tests/sql-cleaner.py
+++ b/ca/tests/sql-cleaner.py
@@ -19,7 +19,6 @@
"""
import rpki.config
-import rpki.sql_schemas
from rpki.mysql_import import MySQLdb
cfg = rpki.config.parser(section = "yamltest", allow_missing = True)
@@ -29,15 +28,6 @@ for name in ("rpkid", "irdbd", "pubd"):
username = cfg.get("%s_sql_username" % name, name[:4])
password = cfg.get("%s_sql_password" % name, "fnord")
- # All of this schema creation stuff will go away once we're on Django ORM.
- # For the moment, a quick kludge for testing.
- schema = []
- if name == "rpkid":
- for line in getattr(rpki.sql_schemas, name, "").splitlines():
- schema.extend(line.partition("--")[0].split())
- schema = " ".join(schema).strip(";").split(";")
- schema = [statement.strip() for statement in schema if statement and "DROP TABLE" not in statement]
-
db = MySQLdb.connect(user = username, passwd = password)
cur = db.cursor()
@@ -57,8 +47,5 @@ for name in ("rpkid", "irdbd", "pubd"):
cur.execute("DROP TABLE %s" % table)
cur.execute("SET foreign_key_checks = 1")
- for statement in schema:
- cur.execute(statement)
-
cur.close()
db.close()
diff --git a/rpki/config.py b/rpki/config.py
index 0be0d1a0..3234294c 100644
--- a/rpki/config.py
+++ b/rpki/config.py
@@ -207,7 +207,6 @@ class parser(object):
# pylint: disable=W0621
import rpki.http
import rpki.x509
- import rpki.sql
import rpki.async
import rpki.log
import rpki.daemonize
@@ -250,11 +249,6 @@ class parser(object):
pass
try:
- rpki.sql.sql_persistent.sql_debug = self.getboolean("sql_debug")
- except ConfigParser.NoOptionError:
- pass
-
- try:
rpki.async.timer.gc_debug = self.getboolean("gc_debug")
except ConfigParser.NoOptionError:
pass
diff --git a/rpki/exceptions.py b/rpki/exceptions.py
index 3ca8bd81..b6889b0d 100644
--- a/rpki/exceptions.py
+++ b/rpki/exceptions.py
@@ -22,281 +22,169 @@ Exception definitions for RPKI modules.
"""
class RPKI_Exception(Exception):
- """
- Base class for RPKI exceptions.
- """
+ "Base class for RPKI exceptions."
class NotInDatabase(RPKI_Exception):
- """
- Lookup failed for an object expected to be in the database.
- """
+ "Lookup failed for an object expected to be in the database."
class BadURISyntax(RPKI_Exception):
- """
- Illegal syntax for a URI.
- """
+ "Illegal syntax for a URI."
class BadStatusCode(RPKI_Exception):
- """
- Unrecognized protocol status code.
- """
+ "Unrecognized protocol status code."
class BadQuery(RPKI_Exception):
- """
- Unexpected protocol query.
- """
+ "Unexpected protocol query."
class DBConsistancyError(RPKI_Exception):
- """
- Found multiple matches for a database query that shouldn't ever
- return that.
- """
+ "Found multiple matches for a database query that shouldn't ever return that."
class CMSVerificationFailed(RPKI_Exception):
- """
- Verification of a CMS message failed.
- """
+ "Verification of a CMS message failed."
class HTTPRequestFailed(RPKI_Exception):
- """
- HTTP request failed.
- """
+ "HTTP request failed."
class DERObjectConversionError(RPKI_Exception):
- """
- Error trying to convert a DER-based object from one representation
- to another.
- """
+ "Error trying to convert a DER-based object from one representation to another."
class NotACertificateChain(RPKI_Exception):
- """
- Certificates don't form a proper chain.
- """
+ "Certificates don't form a proper chain."
class BadContactURL(RPKI_Exception):
- """
- Error trying to parse contact URL.
- """
+ "Error trying to parse contact URL."
class BadClassNameSyntax(RPKI_Exception):
- """
- Illegal syntax for a class_name.
- """
+ "Illegal syntax for a class_name."
class BadIssueResponse(RPKI_Exception):
- """
- issue_response PDU with wrong number of classes or certificates.
- """
+ "issue_response PDU with wrong number of classes or certificates."
class NotImplementedYet(RPKI_Exception):
- """
- Internal error -- not implemented yet.
- """
+ "Internal error -- not implemented yet."
class BadPKCS10(RPKI_Exception):
- """
- Bad PKCS #10 object.
- """
+ "Bad PKCS #10 object."
class UpstreamError(RPKI_Exception):
- """
- Received an error from upstream.
- """
+ "Received an error from upstream."
class ChildNotFound(RPKI_Exception):
- """
- Could not find specified child in database.
- """
+ "Could not find specified child in database."
class BSCNotFound(RPKI_Exception):
- """
- Could not find specified BSC in database.
- """
+ "Could not find specified BSC in database."
class BadSender(RPKI_Exception):
- """
- Unexpected XML sender value.
- """
+ "Unexpected XML sender value."
class ClassNameMismatch(RPKI_Exception):
- """
- class_name does not match child context.
- """
+ "class_name does not match child context."
class ClassNameUnknown(RPKI_Exception):
- """
- Unknown class_name.
- """
+ "Unknown class_name."
class SKIMismatch(RPKI_Exception):
- """
- SKI value in response does not match request.
- """
+ "SKI value in response does not match request."
class SubprocessError(RPKI_Exception):
- """
- Subprocess returned unexpected error.
- """
+ "Subprocess returned unexpected error."
class BadIRDBReply(RPKI_Exception):
- """
- Unexpected reply to IRDB query.
- """
+ "Unexpected reply to IRDB query."
class NotFound(RPKI_Exception):
- """
- Object not found in database.
- """
+ "Object not found in database."
class MustBePrefix(RPKI_Exception):
- """
- Resource range cannot be expressed as a prefix.
- """
+ "Resource range cannot be expressed as a prefix."
class TLSValidationError(RPKI_Exception):
- """
- TLS certificate validation error.
- """
+ "TLS certificate validation error."
class MultipleTLSEECert(TLSValidationError):
- """
- Received more than one TLS EE certificate.
- """
+ "Received more than one TLS EE certificate."
class ReceivedTLSCACert(TLSValidationError):
- """
- Received CA certificate via TLS.
- """
+ "Received CA certificate via TLS."
class WrongEContentType(RPKI_Exception):
- """
- Received wrong CMS eContentType.
- """
+ "Received wrong CMS eContentType."
class EmptyPEM(RPKI_Exception):
- """
- Couldn't find PEM block to convert.
- """
+ "Couldn't find PEM block to convert."
class UnexpectedCMSCerts(RPKI_Exception):
- """
- Received CMS certs when not expecting any.
- """
+ "Received CMS certs when not expecting any."
class UnexpectedCMSCRLs(RPKI_Exception):
- """
- Received CMS CRLs when not expecting any.
- """
+ "Received CMS CRLs when not expecting any."
class MissingCMSEEcert(RPKI_Exception):
- """
- Didn't receive CMS EE cert when expecting one.
- """
+ "Didn't receive CMS EE cert when expecting one."
class MissingCMSCRL(RPKI_Exception):
- """
- Didn't receive CMS CRL when expecting one.
- """
+ "Didn't receive CMS CRL when expecting one."
class UnparsableCMSDER(RPKI_Exception):
- """
- Alleged CMS DER wasn't parsable.
- """
+ "Alleged CMS DER wasn't parsable."
class CMSCRLNotSet(RPKI_Exception):
- """
- CMS CRL has not been configured.
- """
+ "CMS CRL has not been configured."
class ServerShuttingDown(RPKI_Exception):
- """
- Server is shutting down.
- """
+ "Server is shutting down."
class NoActiveCA(RPKI_Exception):
- """
- No active ca_detail for specified class.
- """
+ "No active ca_detail for specified class."
class BadClientURL(RPKI_Exception):
- """
- URL given to HTTP client does not match profile.
- """
+ "URL given to HTTP client does not match profile."
class ClientNotFound(RPKI_Exception):
- """
- Could not find specified client in database.
- """
+ "Could not find specified client in database."
class BadExtension(RPKI_Exception):
- """
- Forbidden X.509 extension.
- """
+ "Forbidden X.509 extension."
class ForbiddenURI(RPKI_Exception):
- """
- Forbidden URI, does not start with correct base URI.
- """
+ "Forbidden URI, does not start with correct base URI."
class HTTPClientAborted(RPKI_Exception):
- """
- HTTP client connection closed while in request-sent state.
- """
+ "HTTP client connection closed while in request-sent state."
class BadPublicationReply(RPKI_Exception):
- """
- Unexpected reply to publication query.
- """
+ "Unexpected reply to publication query."
class DuplicateObject(RPKI_Exception):
- """
- Attempt to create an object that already exists.
- """
+ "Attempt to create an object that already exists."
class EmptyROAPrefixList(RPKI_Exception):
- """
- Can't create ROA with an empty prefix list.
- """
+ "Can't create ROA with an empty prefix list."
class NoCoveringCertForROA(RPKI_Exception):
- """
- Couldn't find a covering certificate to generate ROA.
- """
+ "Couldn't find a covering certificate to generate ROA."
class BSCNotReady(RPKI_Exception):
- """
- BSC not yet in a usable state, signing_cert not set.
- """
+ "BSC not yet in a usable state, signing_cert not set."
class HTTPUnexpectedState(RPKI_Exception):
- """
- HTTP event occurred in an unexpected state.
- """
+ "HTTP event occurred in an unexpected state."
class HTTPBadVersion(RPKI_Exception):
- """
- HTTP couldn't parse HTTP version.
- """
+ "HTTP couldn't parse HTTP version."
class HandleTranslationError(RPKI_Exception):
- """
- Internal error translating protocol handle -> SQL id.
- """
+ "Internal error translating protocol handle -> SQL id."
class NoObjectAtURI(RPKI_Exception):
- """
- No object published at specified URI.
- """
+ "No object published at specified URI."
class ExistingObjectAtURI(RPKI_Exception):
- """
- An object has already been published at specified URI.
- """
+ "An object has already been published at specified URI."
class DifferentObjectAtURI(RPKI_Exception):
- """
- An object with a different hash exists at specified URI.
- """
+ "An object with a different hash exists at specified URI."
class CMSContentNotSet(RPKI_Exception):
"""
@@ -307,76 +195,46 @@ class CMSContentNotSet(RPKI_Exception):
"""
class HTTPTimeout(RPKI_Exception):
- """
- HTTP connection timed out.
- """
+ "HTTP connection timed out."
class BadIPResource(RPKI_Exception):
- """
- Parse failure for alleged IP resource string.
- """
+ "Parse failure for alleged IP resource string."
class BadROAPrefix(RPKI_Exception):
- """
- Parse failure for alleged ROA prefix string.
- """
+ "Parse failure for alleged ROA prefix string."
class CommandParseFailure(RPKI_Exception):
- """
- Failed to parse command line.
- """
+ "Failed to parse command line."
class CMSCertHasExpired(RPKI_Exception):
- """
- CMS certificate has expired.
- """
+ "CMS certificate has expired."
class TrustedCMSCertHasExpired(RPKI_Exception):
- """
- Trusted CMS certificate has expired.
- """
+ "Trusted CMS certificate has expired."
class MultipleCMSEECert(RPKI_Exception):
- """
- Can't have more than one CMS EE certificate in validation chain.
- """
+ "Can't have more than one CMS EE certificate in validation chain."
class ResourceOverlap(RPKI_Exception):
- """
- Overlapping resources in resource_set.
- """
+ "Overlapping resources in resource_set."
class CMSReplay(RPKI_Exception):
- """
- Possible CMS replay attack detected.
- """
+ "Possible CMS replay attack detected."
class PastNotAfter(RPKI_Exception):
- """
- Requested notAfter value is already in the past.
- """
+ "Requested notAfter value is already in the past."
class NullValidityInterval(RPKI_Exception):
- """
- Requested validity interval is null.
- """
+ "Requested validity interval is null."
class BadX510DN(RPKI_Exception):
- """
- X.510 distinguished name does not match profile.
- """
+ "X.510 distinguished name does not match profile."
class BadAutonomousSystemNumber(RPKI_Exception):
- """
- Bad AutonomousSystem number.
- """
+ "Bad AutonomousSystem number."
class WrongEKU(RPKI_Exception):
- """
- Extended Key Usage extension does not match profile.
- """
+ "Extended Key Usage extension does not match profile."
class UnexpectedUpDownResponse(RPKI_Exception):
- """
- Up-down message is not of the expected type.
- """
+ "Up-down message is not of the expected type."
diff --git a/rpki/fields.py b/rpki/fields.py
index 4a826f4e..6c71ac35 100644
--- a/rpki/fields.py
+++ b/rpki/fields.py
@@ -47,6 +47,7 @@ class EnumField(models.PositiveSmallIntegerField):
def __init__(self, *args, **kwargs):
if isinstance(kwargs.get("choices"), (tuple, list)) and isinstance(kwargs["choices"][0], (str, unicode)):
kwargs["choices"] = tuple(enumerate(kwargs["choices"], 1))
+ # Might need something here to handle string-valued default parameter
models.PositiveSmallIntegerField.__init__(self, *args, **kwargs)
self.enum_i2s = dict(self.flatchoices)
self.enum_s2i = dict((v, k) for k, v in self.flatchoices)
@@ -148,6 +149,9 @@ class DERField(BlobField):
__metaclass__ = models.SubfieldBase
def to_python(self, value):
+ if value is not None and not isinstance(value, (self.rpki_type, str)):
+ logger.warning("Why am I now seeing a %r instead of str or %r in the %r rpki.fields.DERField.to_python() method?",
+ type(value), self.rpki_type, type(self))
assert value is None or isinstance(value, (self.rpki_type, str))
if isinstance(value, str):
return self.rpki_type(DER = value)
@@ -165,10 +169,16 @@ class CertificateField(DERField):
description = "X.509 certificate"
rpki_type = rpki.x509.X509
-class KeyField(DERField):
+class RSAPrivateKeyField(DERField):
description = "RSA keypair"
rpki_type = rpki.x509.RSA
+KeyField = RSAPrivateKeyField # XXX backwards compatibility
+
+class PublicKeyField(DERField):
+ description = "RSA keypair"
+ rpki_type = rpki.x509.PublicKey
+
class CRLField(DERField):
description = "Certificate Revocation List"
rpki_type = rpki.x509.CRL
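With KeyField split as above, model declarations can say which kind of key material a column holds. A hypothetical fragment showing how the two new field classes would be used in a Django model (the model and column names are made up for illustration; they are not part of this commit):

    from django.db import models
    from rpki.fields import RSAPrivateKeyField, PublicKeyField

    class ExampleKeypair(models.Model):        # illustrative only
        private_key = RSAPrivateKeyField()     # DER-encoded rpki.x509.RSA keypair
        public_key  = PublicKeyField()         # DER-encoded rpki.x509.PublicKey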
diff --git a/rpki/http.py b/rpki/http.py
index 16ed0453..b991eeb0 100644
--- a/rpki/http.py
+++ b/rpki/http.py
@@ -186,15 +186,20 @@ class http_message(object):
Format an outgoing HTTP message.
"""
- s = self.format_first_line()
+ s = str(self.format_first_line())
+ assert isinstance(s, str)
if self.body is not None:
assert isinstance(self.body, str)
self.headers["Content-Length"] = len(self.body)
for kv in self.headers.iteritems():
- s += "%s: %s\r\n" % kv
+ h = str("%s: %s\r\n" % kv)
+ assert isinstance(h, str)
+ s += h
s += "\r\n"
+ assert isinstance(s, str)
if self.body is not None:
s += self.body
+ assert isinstance(s, str)
return s
def __str__(self):
diff --git a/rpki/irdb/migrations/0004_auto_20151018_1603.py b/rpki/irdb/migrations/0004_auto_20151018_1603.py
new file mode 100644
index 00000000..645d1eaa
--- /dev/null
+++ b/rpki/irdb/migrations/0004_auto_20151018_1603.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import rpki.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('irdb', '0003_repository_rrdp_notification_uri'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='referral',
+ name='private_key',
+ field=rpki.fields.RSAPrivateKeyField(default=None, serialize=False, blank=True),
+ ),
+ migrations.AlterField(
+ model_name='resourceholderca',
+ name='private_key',
+ field=rpki.fields.RSAPrivateKeyField(default=None, serialize=False, blank=True),
+ ),
+ migrations.AlterField(
+ model_name='rootd',
+ name='private_key',
+ field=rpki.fields.RSAPrivateKeyField(default=None, serialize=False, blank=True),
+ ),
+ migrations.AlterField(
+ model_name='serverca',
+ name='private_key',
+ field=rpki.fields.RSAPrivateKeyField(default=None, serialize=False, blank=True),
+ ),
+ migrations.AlterField(
+ model_name='serveree',
+ name='private_key',
+ field=rpki.fields.RSAPrivateKeyField(default=None, serialize=False, blank=True),
+ ),
+ ]
diff --git a/rpki/irdb/models.py b/rpki/irdb/models.py
index 0911d7aa..d2d6256b 100644
--- a/rpki/irdb/models.py
+++ b/rpki/irdb/models.py
@@ -33,7 +33,7 @@ import rpki.resource_set
import socket
import rpki.POW
-from rpki.fields import EnumField, SundialField, CertificateField, DERField, KeyField, CRLField, PKCS10Field
+from rpki.fields import EnumField, SundialField, CertificateField, DERField, RSAPrivateKeyField, CRLField, PKCS10Field
## @var ip_version_choices
# Choice argument for fields implementing IP version numbers.
@@ -139,7 +139,7 @@ class ResourceHolderEEManager(CertificateManager):
class CA(django.db.models.Model):
certificate = CertificateField()
- private_key = KeyField()
+ private_key = RSAPrivateKeyField()
latest_crl = CRLField()
# Might want to bring these into line with what rpkid does. Current
@@ -299,7 +299,7 @@ class ResourceHolderRevocation(Revocation):
issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "revocations")
class EECertificate(Certificate):
- private_key = KeyField()
+ private_key = RSAPrivateKeyField()
class Meta:
abstract = True
diff --git a/rpki/irdb/zookeeper.py b/rpki/irdb/zookeeper.py
index 4b4a2f46..6a355f9e 100644
--- a/rpki/irdb/zookeeper.py
+++ b/rpki/irdb/zookeeper.py
@@ -583,6 +583,9 @@ class Zookeeper(object):
x = etree_read(filename)
+ if x.tag != tag_oob_child_request:
+ raise BadXMLMessage("Expected %s, got %s", tag_oob_child_request, x.tag)
+
if child_handle is None:
child_handle = x.get("child_handle")
@@ -674,6 +677,9 @@ class Zookeeper(object):
x = etree_read(filename)
+ if x.tag != tag_oob_parent_response:
+ raise BadXMLMessage("Expected %s, got %s", tag_oob_parent_response, x.tag)
+
if parent_handle is None:
parent_handle = x.get("parent_handle")
@@ -757,6 +763,9 @@ class Zookeeper(object):
x = etree_read(filename)
+ if x.tag != tag_oob_publisher_request:
+ raise BadXMLMessage("Expected %s, got %s", tag_oob_publisher_request, x.tag)
+
client_ta = rpki.x509.X509(Base64 = x.findtext(tag_oob_publisher_bpki_ta))
referral = x.find(tag_oob_referral)
@@ -868,7 +877,10 @@ class Zookeeper(object):
x = etree_read(filename)
- self.log("Repository calls us %r" % (x.get("client_handle")))
+ if x.tag != tag_oob_repository_response:
+ raise BadXMLMessage("Expected %s, got %s", tag_oob_repository_response, x.tag)
+
+ self.log("Repository calls us %r" % (x.get("publisher_handle")))
if parent_handle is not None:
self.log("Explicit parent_handle given")
@@ -1231,18 +1243,18 @@ class Zookeeper(object):
throw exceptions as needed.
"""
- if any(r_pdu.tag in (rpki.left_right.tag_report_error,
- rpki.publication_control.tag_report_error)
- for r_pdu in r_msg):
- for r_pdu in r_msg:
- if r_pdu.tag == rpki.left_right.tag_report_error:
- self.log("rpkid reported failure: %s" % r_pdu.get("error_code"))
- elif r_pdu.tag == rpki.publication_control.tag_report_error:
- self.log("pubd reported failure: %s" % r_pdu.get("error_code"))
- else:
- continue
- if r_pdu.text:
- self.log(r_pdu.text)
+ failed = False
+ for r_pdu in r_msg.getiterator(rpki.left_right.tag_report_error):
+ failed = True
+ self.log("rpkid reported failure: %s" % r_pdu.get("error_code"))
+ if r_pdu.text:
+ self.log(r_pdu.text)
+ for r_pdu in r_msg.getiterator(rpki.publication_control.tag_report_error):
+ failed = True
+ self.log("pubd reported failure: %s" % r_pdu.get("error_code"))
+ if r_pdu.text:
+ self.log(r_pdu.text)
+ if failed:
raise CouldntTalkToDaemon
@@ -1343,24 +1355,18 @@ class Zookeeper(object):
r_msg = self.call_rpkid(q_msg, suppress_error_check = True)
- if r_msg[0].tag == rpki.left_right.tag_self:
- self.check_error_report(r_msg)
- self_pdu = r_msg[0]
- else:
- self_pdu = None
+ self.check_error_report(r_msg)
+
+ self_pdu = r_msg.find(rpki.left_right.tag_self)
bsc_pdus = dict((r_pdu.get("bsc_handle"), r_pdu)
- for r_pdu in r_msg
- if r_pdu.tag == rpki.left_right.tag_bsc)
+ for r_pdu in r_msg.getiterator(rpki.left_right.tag_bsc))
repository_pdus = dict((r_pdu.get("repository_handle"), r_pdu)
- for r_pdu in r_msg
- if r_pdu.tag == rpki.left_right.tag_repository)
+ for r_pdu in r_msg.getiterator(rpki.left_right.tag_repository))
parent_pdus = dict((r_pdu.get("parent_handle"), r_pdu)
- for r_pdu in r_msg
- if r_pdu.tag == rpki.left_right.tag_parent)
+ for r_pdu in r_msg.getiterator(rpki.left_right.tag_parent))
child_pdus = dict((r_pdu.get("child_handle"), r_pdu)
- for r_pdu in r_msg
- if r_pdu.tag == rpki.left_right.tag_child)
+ for r_pdu in r_msg.getiterator(rpki.left_right.tag_child))
q_msg = self._compose_left_right_query()
@@ -1408,8 +1414,8 @@ class Zookeeper(object):
SubElement(q_msg, rpki.left_right.tag_bsc, action = "list", tag = "bsc", self_handle = ca.handle)
r_msg = self.call_rpkid(q_msg)
bsc_pdus = dict((r_pdu.get("bsc_handle"), r_pdu)
- for r_pdu in r_msg
- if r_pdu.tag == rpki.left_right.tag_bsc and r_pdu.get("action") == "list")
+ for r_pdu in r_msg.getiterator(rpki.left_right.tag_bsc)
+ if r_pdu.get("action") == "list")
bsc_pdu = bsc_pdus.pop(bsc_handle, None)
q_msg = self._compose_left_right_query()
diff --git a/rpki/left_right.py b/rpki/left_right.py
index 7189f888..87082106 100644
--- a/rpki/left_right.py
+++ b/rpki/left_right.py
@@ -21,12 +21,9 @@
RPKI "left-right" protocol.
"""
-import base64
import logging
-import collections
import rpki.x509
-import rpki.sql
import rpki.exceptions
import rpki.http
import rpki.up_down
@@ -37,7 +34,6 @@ import rpki.publication
import rpki.async
import rpki.rpkid_tasks
-from lxml.etree import Element, SubElement, tostring as ElementToString
logger = logging.getLogger(__name__)
@@ -67,1122 +63,6 @@ tag_signing_cert = xmlns + "signing_cert"
tag_signing_cert_crl = xmlns + "signing_cert_crl"
-class base_elt(rpki.sql.sql_persistent):
- """
- Virtual class for persistent left-right protocol elements.
- These classes are being phased out in favor of Django ORM models.
- """
-
- handles = ()
- attributes = ()
- elements = ()
- booleans = ()
-
- self_id = None
- self_handle = None
-
- def __str__(self):
- return ElementToString(self.toXML(), pretty_print = True, encoding = "us-ascii")
-
- @classmethod
- def fromXML(cls, elt):
- self = cls()
- for key in self.attributes:
- val = elt.get(key, None)
- if val is not None:
- val = val.encode("ascii")
- if isinstance(self.attributes, dict) and self.attributes[key] is not None:
- val = self.attributes[key](val)
- elif val.isdigit() and not key.endswith("_handle"):
- val = long(val)
- setattr(self, key, val)
- for key in self.booleans:
- setattr(self, key, elt.get(key, False))
- for b64 in elt:
- assert b64.tag.startswith(xmlns)
- setattr(self, b64.tag[len(xmlns):], self.elements[b64.tag](Base64 = b64.text))
- return self
-
- def toXML(self):
- elt = Element(self.element_name, nsmap = nsmap)
- for key in self.attributes:
- val = getattr(self, key, None)
- if val is not None:
- elt.set(key, str(val))
- for key in self.booleans:
- if getattr(self, key, False):
- elt.set(key, "yes")
- for name in self.elements:
- value = getattr(self, name[len(xmlns):], None)
- if value is not None and not value.empty():
- SubElement(elt, name, nsmap = nsmap).text = value.get_Base64()
- return elt
-
- def make_reply(self, r_pdu = None):
- if r_pdu is None:
- r_pdu = self.__class__()
- self.make_reply_clone_hook(r_pdu)
- handle_name = self.element_name[len(xmlns):] + "_handle"
- setattr(r_pdu, handle_name, getattr(self, handle_name, None))
- else:
- self.make_reply_clone_hook(r_pdu)
- for b in r_pdu.booleans:
- setattr(r_pdu, b, False)
- r_pdu.action = self.action
- r_pdu.tag = self.tag
- return r_pdu
-
- def serve_fetch_one(self):
- """
- Find the object on which a get, set, or destroy method should
- operate.
- """
-
- r = self.serve_fetch_one_maybe()
- if r is None:
- raise rpki.exceptions.NotFound
- return r
-
- def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb):
- cb()
-
- def serve_create(self, r_msg, cb, eb):
- r_pdu = self.make_reply()
- def one():
- self.sql_store()
- setattr(r_pdu, self.sql_template.index, getattr(self, self.sql_template.index))
- self.serve_post_save_hook(self, r_pdu, two, eb)
- def two():
- r_msg.append(r_pdu)
- cb()
- oops = self.serve_fetch_one_maybe()
- if oops is not None:
- raise rpki.exceptions.DuplicateObject("Object already exists: %r[%r] %r[%r]" % (
- self, getattr(self, self.element_name[len(xmlns):] + "_handle"),
- oops, getattr(oops, oops.element_name[len(xmlns):] + "_handle")))
- self.serve_pre_save_hook(self, r_pdu, one, eb)
-
- def serve_set(self, r_msg, cb, eb):
- db_pdu = self.serve_fetch_one()
- r_pdu = self.make_reply()
- for a in db_pdu.sql_template.columns[1:]:
- v = getattr(self, a, None)
- if v is not None:
- setattr(db_pdu, a, v)
- db_pdu.sql_mark_dirty()
- def one():
- db_pdu.sql_store()
- db_pdu.serve_post_save_hook(self, r_pdu, two, eb)
- def two():
- r_msg.append(r_pdu)
- cb()
- db_pdu.serve_pre_save_hook(self, r_pdu, one, eb)
-
- def serve_get(self, r_msg, cb, eb):
- r_pdu = self.serve_fetch_one()
- self.make_reply(r_pdu)
- r_msg.append(r_pdu)
- cb()
-
- def serve_list(self, r_msg, cb, eb):
- for r_pdu in self.serve_fetch_all():
- self.make_reply(r_pdu)
- r_msg.append(r_pdu)
- cb()
-
- def serve_destroy_hook(self, cb, eb):
- cb()
-
- def serve_destroy(self, r_msg, cb, eb):
- def done():
- db_pdu.sql_delete()
- r_msg.append(self.make_reply())
- cb()
- db_pdu = self.serve_fetch_one()
- db_pdu.serve_destroy_hook(done, eb)
-
- def serve_dispatch(self, r_msg, cb, eb):
- # Transition hack: handle the .toXML() call for old handlers.
- fake_r_msg = []
- def fake_convert():
- r_msg.extend(r_pdu.toXML() if isinstance(r_pdu, base_elt) else r_pdu
- for r_pdu in fake_r_msg)
- def fake_cb():
- fake_convert()
- cb()
- def fake_eb(e):
- fake_convert()
- eb(e)
- method = getattr(self, "serve_" + self.action, None)
- if method is None:
- raise rpki.exceptions.BadQuery("Unexpected query: action %s" % self.action)
- method(fake_r_msg, fake_cb, fake_eb)
-
- def unimplemented_control(self, *controls):
- unimplemented = [x for x in controls if getattr(self, x, False)]
- if unimplemented:
- raise rpki.exceptions.NotImplementedYet("Unimplemented control %s" % ", ".join(unimplemented))
-
- @property
- @rpki.sql.cache_reference
- def self(self):
- return self_elt.sql_fetch(self.gctx, self.self_id)
-
- @property
- @rpki.sql.cache_reference
- def bsc(self):
- return bsc_elt.sql_fetch(self.gctx, self.bsc_id)
-
- def make_reply_clone_hook(self, r_pdu):
- if r_pdu.self_handle is None:
- r_pdu.self_handle = self.self_handle
- for tag, elt in self.handles:
- id_name = tag + "_id"
- handle_name = tag + "_handle"
- if getattr(r_pdu, handle_name, None) is None:
- try:
- setattr(r_pdu, handle_name, getattr(elt.sql_fetch(self.gctx, getattr(r_pdu, id_name)), handle_name))
- except AttributeError:
- continue
-
- @classmethod
- def serve_fetch_handle(cls, gctx, self_id, handle):
- name = cls.element_name[len(xmlns):]
- return cls.sql_fetch_where1(gctx, name + "_handle = %s AND self_id = %s", (handle, self_id))
-
- def serve_fetch_one_maybe(self):
- name = self.element_name[len(xmlns):]
- where = "%s.%s_handle = %%s AND %s.self_id = self.self_id AND self.self_handle = %%s" % (name, name, name)
- args = (getattr(self, name + "_handle"), self.self_handle)
- logger.debug(".serve_fetch_one_maybe() %s %s", args[0], args[1])
- return self.sql_fetch_where1(self.gctx, where, args, "self")
-
- def serve_fetch_all(self):
- name = self.element_name[len(xmlns):]
- where = "%s.self_id = self.self_id and self.self_handle = %%s" % name
- return self.sql_fetch_where(self.gctx, where, (self.self_handle,), "self")
-
- def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb):
- # self is always the object to be saved to SQL.
- for tag, elt in self.handles:
- id_name = tag + "_id"
- if getattr(self, id_name, None) is None:
- x = elt.serve_fetch_handle(self.gctx, self.self_id, getattr(q_pdu, tag + "_handle"))
- if x is None:
- raise rpki.exceptions.HandleTranslationError("Could not translate %r %s_handle" % (self, tag))
- setattr(self, id_name, getattr(x, id_name))
- cb()
-
-
-class self_elt(base_elt):
- """
- <self/> element.
- """
-
- element_name = xmlns + "self"
- attributes = ("action", "tag", "self_handle", "crl_interval", "regen_margin")
- booleans = ("rekey", "reissue", "revoke", "run_now", "publish_world_now", "revoke_forgotten",
- "clear_replay_protection")
-
- elements = collections.OrderedDict((
- (tag_bpki_cert, rpki.x509.X509),
- (tag_bpki_glue, rpki.x509.X509)))
-
- sql_template = rpki.sql.template(
- "self",
- "self_id",
- "self_handle",
- "use_hsm",
- "crl_interval",
- "regen_margin",
- ("bpki_cert", rpki.x509.X509),
- ("bpki_glue", rpki.x509.X509))
-
- handles = ()
-
- use_hsm = False
- crl_interval = None
- regen_margin = None
- bpki_cert = None
- bpki_glue = None
- cron_tasks = None
-
- def __repr__(self):
- return rpki.log.log_repr(self)
-
- @property
- def bscs(self):
- return bsc_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))
-
- @property
- def repositories(self):
- return repository_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))
-
- @property
- def parents(self):
- return parent_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))
-
- @property
- def children(self):
- return child_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))
-
- @property
- def roas(self):
- return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))
-
- @property
- def ghostbusters(self):
- return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))
-
- @property
- def ee_certificates(self):
- return rpki.rpkid.ee_cert_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))
-
- def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb):
- actions = []
- if q_pdu.rekey:
- actions.append(self.serve_rekey)
- if q_pdu.revoke:
- actions.append(self.serve_revoke)
- if q_pdu.reissue:
- actions.append(self.serve_reissue)
- if q_pdu.revoke_forgotten:
- actions.append(self.serve_revoke_forgotten)
- if q_pdu.publish_world_now:
- actions.append(self.serve_publish_world_now)
- if q_pdu.run_now:
- actions.append(self.serve_run_now)
- if q_pdu.clear_replay_protection:
- actions.append(self.serve_clear_replay_protection)
- def loop(iterator, action):
- action(iterator, eb)
- rpki.async.iterator(actions, loop, cb)
-
- def serve_rekey(self, cb, eb):
- def loop(iterator, parent):
- parent.serve_rekey(iterator, eb)
- rpki.async.iterator(self.parents, loop, cb)
-
- def serve_revoke(self, cb, eb):
- def loop(iterator, parent):
- parent.serve_revoke(iterator, eb)
- rpki.async.iterator(self.parents, loop, cb)
-
- def serve_reissue(self, cb, eb):
- def loop(iterator, parent):
- parent.serve_reissue(iterator, eb)
- rpki.async.iterator(self.parents, loop, cb)
-
- def serve_revoke_forgotten(self, cb, eb):
- def loop(iterator, parent):
- parent.serve_revoke_forgotten(iterator, eb)
- rpki.async.iterator(self.parents, loop, cb)
-
- def serve_clear_replay_protection(self, cb, eb):
- def loop(iterator, obj):
- obj.serve_clear_replay_protection(iterator, eb)
- rpki.async.iterator(self.parents + self.children + self.repositories, loop, cb)
-
- def serve_destroy_hook(self, cb, eb):
- def loop(iterator, parent):
- parent.delete(iterator)
- rpki.async.iterator(self.parents, loop, cb)
-
-
- def serve_publish_world_now(self, cb, eb):
- publisher = rpki.rpkid.publication_queue()
- repositories = set()
- objects = dict()
-
- def loop(iterator, parent):
- repository = parent.repository
- if repository.peer_contact_uri in repositories:
- return iterator()
- repositories.add(repository.peer_contact_uri)
- q_msg = Element(rpki.publication.tag_msg, nsmap = rpki.publication.nsmap,
- type = "query", version = rpki.publication.version)
- SubElement(q_msg, rpki.publication.tag_list, tag = "list")
- def list_handler(r_pdu):
- rpki.publication.raise_if_error(r_pdu)
- assert r_pdu.tag == rpki.publication.tag_list
- assert r_pdu.get("uri") not in objects
- objects[r_pdu.get("uri")] = (r_pdu.get("hash"), repository)
- repository.call_pubd(iterator, eb,
- q_msg, length_check = False,
- handlers = dict(list = list_handler))
-
- def reconcile(uri, obj, repository):
- h, r = objects.pop(uri, (None, None))
- if h is not None:
- assert r == repository
- publisher.queue(uri = uri, new_obj = obj, old_hash = h, repository = repository)
-
- def done():
- for parent in self.parents:
- repository = parent.repository
- for ca in parent.cas:
- ca_detail = ca.active_ca_detail
- if ca_detail is not None:
- reconcile(uri = ca_detail.crl_uri,
- obj = ca_detail.latest_crl,
- repository = repository)
- reconcile(uri = ca_detail.manifest_uri,
- obj = ca_detail.latest_manifest,
- repository = repository)
- for c in ca_detail.child_certs:
- reconcile(uri = c.uri,
- obj = c.cert,
- repository = repository)
- for r in ca_detail.roas:
- if r.roa is not None:
- reconcile(uri = r.uri,
- obj = r.roa,
- repository = repository)
- for g in ca_detail.ghostbusters:
- reconcile(uri = g.uri,
- obj = g.ghostbuster,
- repository = repository)
- for c in ca_detail.ee_certificates:
- reconcile(uri = c.uri,
- obj = c.cert,
- repository = repository)
- for u in objects:
- h, r = objects[u]
- publisher.queue(uri = u, old_hash = h, repository = r)
- publisher.call_pubd(cb, eb)
-
- rpki.async.iterator(self.parents, loop, done)
-
- def serve_run_now(self, cb, eb):
- logger.debug("Forced immediate run of periodic actions for self %s[%d]",
- self.self_handle, self.self_id)
- completion = rpki.rpkid_tasks.CompletionHandler(cb)
- self.schedule_cron_tasks(completion)
- assert completion.count > 0
- self.gctx.task_run()
-
- def serve_fetch_one_maybe(self):
- """
- Find the self object upon which a get, set, or destroy action
- should operate, or which would conflict with a create method.
- """
-
- return self.serve_fetch_handle(self.gctx, None, self.self_handle)
-
- @classmethod
- def serve_fetch_handle(cls, gctx, self_id, self_handle):
- return cls.sql_fetch_where1(gctx, "self_handle = %s", (self_handle,))
-
- def serve_fetch_all(self):
- """
- Find the self objects upon which a list action should operate.
- This is different from the list action for all other objects,
- where list only works within a given self_id context.
- """
-
- return self.sql_fetch_all(self.gctx)
-
- def schedule_cron_tasks(self, completion):
- if self.cron_tasks is None:
- self.cron_tasks = tuple(task(self) for task in rpki.rpkid_tasks.task_classes)
- for task in self.cron_tasks:
- self.gctx.task_add(task)
- completion.register(task)
-
- def find_covering_ca_details(self, resources):
- """
- Return all active ca_detail_objs for this <self/> which cover a
- particular set of resources.
-
- If we expected there to be a large number of ca_detail_objs, we
- could add index tables and write fancy SQL query to do this, but
- for the expected common case where there are only one or two
- active ca_detail_objs per <self/>, it's probably not worth it. In
- any case, this is an optimization we can leave for later.
- """
-
- results = set()
- for parent in self.parents:
- for ca in parent.cas:
- ca_detail = ca.active_ca_detail
- if ca_detail is not None and ca_detail.covers(resources):
- results.add(ca_detail)
- return results
-
-
-class bsc_elt(base_elt):
- """
- <bsc/> (Business Signing Context) element.
- """
-
- element_name = xmlns + "bsc"
- attributes = ("action", "tag", "self_handle", "bsc_handle", "key_type", "hash_alg", "key_length")
- booleans = ("generate_keypair",)
-
- elements = collections.OrderedDict((
- (tag_signing_cert, rpki.x509.X509),
- (tag_signing_cert_crl, rpki.x509.CRL),
- (tag_pkcs10_request, rpki.x509.PKCS10)))
-
- sql_template = rpki.sql.template(
- "bsc",
- "bsc_id",
- "bsc_handle",
- "self_id",
- "hash_alg",
- ("private_key_id", rpki.x509.RSA),
- ("pkcs10_request", rpki.x509.PKCS10),
- ("signing_cert", rpki.x509.X509),
- ("signing_cert_crl", rpki.x509.CRL))
-
- handles = (("self", self_elt),)
-
- private_key_id = None
- pkcs10_request = None
- signing_cert = None
- signing_cert_crl = None
-
- def __repr__(self):
- return rpki.log.log_repr(self, self.bsc_handle)
-
- @property
- def repositories(self):
- return repository_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,))
-
- @property
- def parents(self):
- return parent_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,))
-
- @property
- def children(self):
- return child_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,))
-
- def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb):
- """
- Extra server actions -- handle key generation, only RSA with SHA-256 for now.
- """
-
- if q_pdu.generate_keypair:
- assert q_pdu.key_type in (None, "rsa") and q_pdu.hash_alg in (None, "sha256")
- self.private_key_id = rpki.x509.RSA.generate(keylength = q_pdu.key_length or 2048)
- self.pkcs10_request = rpki.x509.PKCS10.create(keypair = self.private_key_id)
- r_pdu.pkcs10_request = self.pkcs10_request
- super(bsc_elt, self).serve_pre_save_hook(q_pdu, r_pdu, cb, eb)
-
-
-class repository_elt(base_elt):
- """
- <repository/> element.
- """
-
- element_name = xmlns + "repository"
- attributes = ("action", "tag", "self_handle", "repository_handle", "bsc_handle", "peer_contact_uri", "rrdp_notification_uri")
- booleans = ("clear_replay_protection",)
-
- elements = collections.OrderedDict((
- (tag_bpki_cert, rpki.x509.X509),
- (tag_bpki_glue, rpki.x509.X509)))
-
- sql_template = rpki.sql.template(
- "repository",
- "repository_id",
- "repository_handle",
- "self_id",
- "bsc_id",
- "peer_contact_uri",
- "rrdp_notification_uri",
- ("bpki_cert", rpki.x509.X509),
- ("bpki_glue", rpki.x509.X509),
- ("last_cms_timestamp", rpki.sundial.datetime))
-
- handles = (("self", self_elt),
- ("bsc", bsc_elt))
-
- bpki_cert = None
- bpki_glue = None
- last_cms_timestamp = None
- rrdp_notification_uri = None
-
- def __repr__(self):
- return rpki.log.log_repr(self, self.repository_handle)
-
- @property
- def parents(self):
- return parent_elt.sql_fetch_where(self.gctx, "repository_id = %s", (self.repository_id,))
-
- def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb):
- actions = []
- if q_pdu.clear_replay_protection:
- actions.append(self.serve_clear_replay_protection)
- def loop(iterator, action):
- action(iterator, eb)
- rpki.async.iterator(actions, loop, cb)
-
- def serve_clear_replay_protection(self, cb, eb):
- self.last_cms_timestamp = None
- self.sql_mark_dirty()
- cb()
-
-
- def call_pubd(self, callback, errback, q_msg, handlers = {}, length_check = True): # pylint: disable=W0102
- """
- Send a message to publication daemon and return the response.
-
- As a convenience, attempting to send an empty message returns
- immediate success without sending anything.
-
- handlers is a dict of handler functions to process the response
- PDUs. If the tag value in the response PDU appears in the dict,
- the associated handler is called to process the PDU. If no tag
- matches, a default handler is called to check for errors; a
- handler value of False suppresses calling of the default handler.
- """
-
- try:
- self.gctx.sql.sweep()
-
- if len(q_msg) == 0:
- return callback()
-
- for q_pdu in q_msg:
- logger.info("Sending %r to pubd", q_pdu)
-
- bsc = self.bsc
- q_der = rpki.publication.cms_msg().wrap(q_msg, bsc.private_key_id, bsc.signing_cert, bsc.signing_cert_crl)
- bpki_ta_path = (self.gctx.bpki_ta, self.self.bpki_cert, self.self.bpki_glue, self.bpki_cert, self.bpki_glue)
-
- def done(r_der):
- try:
- logger.debug("Received response from pubd")
- r_cms = rpki.publication.cms_msg(DER = r_der)
- r_msg = r_cms.unwrap(bpki_ta_path)
- r_cms.check_replay_sql(self, self.peer_contact_uri)
- for r_pdu in r_msg:
- handler = handlers.get(r_pdu.get("tag"), rpki.publication.raise_if_error)
- if handler:
- logger.debug("Calling pubd handler %r", handler)
- handler(r_pdu)
- if length_check and len(q_msg) != len(r_msg):
- raise rpki.exceptions.BadPublicationReply("Wrong number of response PDUs from pubd: sent %r, got %r" % (q_msg, r_msg))
- callback()
- except (rpki.async.ExitNow, SystemExit):
- raise
- except Exception, e:
- errback(e)
-
- logger.debug("Sending request to pubd")
- rpki.http.client(
- url = self.peer_contact_uri,
- msg = q_der,
- callback = done,
- errback = errback)
-
- except (rpki.async.ExitNow, SystemExit):
- raise
- except Exception, e:
- errback(e)
-
-
-class parent_elt(base_elt):
- """
- <parent/> element.
- """
-
- element_name = xmlns + "parent"
- attributes = ("action", "tag", "self_handle", "parent_handle", "bsc_handle", "repository_handle",
- "peer_contact_uri", "sia_base", "sender_name", "recipient_name")
- booleans = ("rekey", "reissue", "revoke", "revoke_forgotten", "clear_replay_protection")
-
- elements = collections.OrderedDict((
- (tag_bpki_cert, rpki.x509.X509),
- (tag_bpki_glue, rpki.x509.X509)))
-
- sql_template = rpki.sql.template(
- "parent",
- "parent_id",
- "parent_handle",
- "self_id",
- "bsc_id",
- "repository_id",
- "peer_contact_uri",
- "sia_base",
- "sender_name",
- "recipient_name",
- ("bpki_cert", rpki.x509.X509),
- ("bpki_glue", rpki.x509.X509),
- ("last_cms_timestamp", rpki.sundial.datetime))
-
- handles = (("self", self_elt),
- ("bsc", bsc_elt),
- ("repository", repository_elt))
-
- bpki_cert = None
- bpki_glue = None
- last_cms_timestamp = None
-
- def __repr__(self):
- return rpki.log.log_repr(self, self.parent_handle)
-
- @property
- @rpki.sql.cache_reference
- def repository(self):
- return repository_elt.sql_fetch(self.gctx, self.repository_id)
-
- @property
- def cas(self):
- return rpki.rpkid.ca_obj.sql_fetch_where(self.gctx, "parent_id = %s", (self.parent_id,))
-
- def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb):
- actions = []
- if q_pdu.rekey:
- actions.append(self.serve_rekey)
- if q_pdu.revoke:
- actions.append(self.serve_revoke)
- if q_pdu.reissue:
- actions.append(self.serve_reissue)
- if q_pdu.revoke_forgotten:
- actions.append(self.serve_revoke_forgotten)
- if q_pdu.clear_replay_protection:
- actions.append(self.serve_clear_replay_protection)
- def loop(iterator, action):
- action(iterator, eb)
- rpki.async.iterator(actions, loop, cb)
-
- def serve_rekey(self, cb, eb):
- def loop(iterator, ca):
- ca.rekey(iterator, eb)
- rpki.async.iterator(self.cas, loop, cb)
-
- def serve_revoke(self, cb, eb):
- def loop(iterator, ca):
- ca.revoke(cb = iterator, eb = eb)
- rpki.async.iterator(self.cas, loop, cb)
-
- def serve_reissue(self, cb, eb):
- def loop(iterator, ca):
- ca.reissue(cb = iterator, eb = eb)
- rpki.async.iterator(self.cas, loop, cb)
-
- def serve_clear_replay_protection(self, cb, eb):
- self.last_cms_timestamp = None
- self.sql_mark_dirty()
- cb()
-
-
- def get_skis(self, cb, eb):
- """
- Fetch SKIs that this parent thinks we have. In theory this should
- agree with our own database, but in practice stuff can happen, so
- sometimes we need to know what our parent thinks.
-
- Result is a dictionary with the resource class name as key and a
- set of SKIs as value.
- """
-
- def done(r_msg):
- cb(dict((rc.get("class_name"),
- set(rpki.x509.X509(Base64 = c.text).gSKI()
- for c in rc.getiterator(rpki.up_down.tag_certificate)))
- for rc in r_msg.getiterator(rpki.up_down.tag_class)))
- self.up_down_list_query(done, eb)
-
-
- def revoke_skis(self, rc_name, skis_to_revoke, cb, eb):
- """
- Revoke a set of SKIs within a particular resource class.
- """
-
- def loop(iterator, ski):
- def revoked(r_pdu):
- iterator()
- logger.debug("Asking parent %r to revoke class %r, SKI %s", self, rc_name, ski)
- self.up_down_revoke_query(rc_name, ski, revoked, eb)
- rpki.async.iterator(skis_to_revoke, loop, cb)
-
-
- def serve_revoke_forgotten(self, cb, eb):
- """
- Handle a left-right revoke_forgotten action for this parent.
-
- This is a bit fiddly: we have to compare the result of an up-down
- list query with what we have locally and identify the SKIs of any
- certificates that have gone missing. This should never happen in
- ordinary operation, but can arise if we have somehow lost a
- private key, in which case there is nothing more we can do with
- the issued cert, so we have to clear it. As this really is not
- supposed to happen, we don't clear it automatically, instead we
- require an explicit trigger.
- """
-
- def got_skis(skis_from_parent):
-
- def loop(iterator, item):
- rc_name, skis_to_revoke = item
- if rc_name in ca_map:
- for ca_detail in ca_map[rc_name].issue_response_candidate_ca_details:
- skis_to_revoke.discard(ca_detail.latest_ca_cert.gSKI())
- self.revoke_skis(rc_name, skis_to_revoke, iterator, eb)
-
- ca_map = dict((ca.parent_resource_class, ca) for ca in self.cas)
- rpki.async.iterator(skis_from_parent.items(), loop, cb)
-
- self.get_skis(got_skis, eb)
-
-
- def delete(self, cb, delete_parent = True):
- """
- Delete all the CA stuff under this parent, and perhaps the parent
- itself.
- """
-
- def loop(iterator, ca):
- self.gctx.checkpoint()
- ca.delete(self, iterator)
-
- def revoke():
- self.gctx.checkpoint()
- self.serve_revoke_forgotten(done, fail)
-
- def fail(e):
- logger.warning("Trouble getting parent to revoke certificates, blundering onwards: %s", e)
- done()
-
- def done():
- self.gctx.checkpoint()
- self.gctx.sql.sweep()
- if delete_parent:
- self.sql_delete()
- cb()
-
- rpki.async.iterator(self.cas, loop, revoke)
-
-
- def serve_destroy_hook(self, cb, eb):
- self.delete(cb, delete_parent = False)
-
-
- def _compose_up_down_query(self, query_type):
- return Element(rpki.up_down.tag_message, nsmap = rpki.up_down.nsmap, version = rpki.up_down.version,
- sender = self.sender_name, recipient = self.recipient_name, type = query_type)
-
-
- def up_down_list_query(self, cb, eb):
- q_msg = self._compose_up_down_query("list")
- self.query_up_down(q_msg, cb, eb)
-
-
- def up_down_issue_query(self, ca, ca_detail, cb, eb):
- pkcs10 = rpki.x509.PKCS10.create(
- keypair = ca_detail.private_key_id,
- is_ca = True,
- caRepository = ca.sia_uri,
- rpkiManifest = ca_detail.manifest_uri,
- rpkiNotify = ca.parent.repository.rrdp_notification_uri)
- q_msg = self._compose_up_down_query("issue")
- q_pdu = SubElement(q_msg, rpki.up_down.tag_request, class_name = ca.parent_resource_class)
- q_pdu.text = pkcs10.get_Base64()
- self.query_up_down(q_msg, cb, eb)
-
-
- def up_down_revoke_query(self, class_name, ski, cb, eb):
- q_msg = self._compose_up_down_query("revoke")
- SubElement(q_msg, rpki.up_down.tag_key, class_name = class_name, ski = ski)
- self.query_up_down(q_msg, cb, eb)
-
-
- def query_up_down(self, q_msg, cb, eb):
-
- bsc = self.bsc
- if bsc is None:
- raise rpki.exceptions.BSCNotFound("Could not find BSC %s" % self.bsc_id)
-
- if bsc.signing_cert is None:
- raise rpki.exceptions.BSCNotReady("BSC %r[%s] is not yet usable" % (bsc.bsc_handle, bsc.bsc_id))
-
- q_der = rpki.up_down.cms_msg().wrap(q_msg, bsc.private_key_id,
- bsc.signing_cert,
- bsc.signing_cert_crl)
-
- def unwrap(r_der):
- try:
- r_cms = rpki.up_down.cms_msg(DER = r_der)
- r_msg = r_cms.unwrap((self.gctx.bpki_ta,
- self.self.bpki_cert,
- self.self.bpki_glue,
- self.bpki_cert,
- self.bpki_glue))
- r_cms.check_replay_sql(self, self.peer_contact_uri)
- rpki.up_down.check_response(r_msg, q_msg.get("type"))
-
- except (SystemExit, rpki.async.ExitNow):
- raise
- except Exception, e:
- eb(e)
- else:
- cb(r_msg)
-
- rpki.http.client(
- msg = q_der,
- url = self.peer_contact_uri,
- callback = unwrap,
- errback = eb,
- content_type = rpki.up_down.content_type)
-
-
-class child_elt(base_elt):
- """
- <child/> element.
- """
-
- element_name = xmlns + "child"
- attributes = ("action", "tag", "self_handle", "child_handle", "bsc_handle")
- booleans = ("reissue", "clear_replay_protection")
-
- elements = collections.OrderedDict((
- (tag_bpki_cert, rpki.x509.X509),
- (tag_bpki_glue, rpki.x509.X509)))
-
- sql_template = rpki.sql.template(
- "child",
- "child_id",
- "child_handle",
- "self_id",
- "bsc_id",
- ("bpki_cert", rpki.x509.X509),
- ("bpki_glue", rpki.x509.X509),
- ("last_cms_timestamp", rpki.sundial.datetime))
-
- handles = (("self", self_elt),
- ("bsc", bsc_elt))
-
- bpki_cert = None
- bpki_glue = None
- last_cms_timestamp = None
-
- def __repr__(self):
- return rpki.log.log_repr(self, self.child_handle)
-
- def fetch_child_certs(self, ca_detail = None, ski = None, unique = False):
- return rpki.rpkid.child_cert_obj.fetch(self.gctx, self, ca_detail, ski, unique)
-
- @property
- def child_certs(self):
- return self.fetch_child_certs()
-
- @property
- def parents(self):
- return parent_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))
-
- def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb):
- actions = []
- if q_pdu.reissue:
- actions.append(self.serve_reissue)
- if q_pdu.clear_replay_protection:
- actions.append(self.serve_clear_replay_protection)
- def loop(iterator, action):
- action(iterator, eb)
- rpki.async.iterator(actions, loop, cb)
-
- def serve_reissue(self, cb, eb):
- publisher = rpki.rpkid.publication_queue()
- for child_cert in self.child_certs:
- child_cert.reissue(child_cert.ca_detail, publisher, force = True)
- publisher.call_pubd(cb, eb)
-
- def serve_clear_replay_protection(self, cb, eb):
- self.last_cms_timestamp = None
- self.sql_mark_dirty()
- cb()
-
- def ca_from_class_name(self, class_name):
- if not class_name.isdigit():
- raise rpki.exceptions.BadClassNameSyntax("Bad class name %s" % class_name)
- ca = rpki.rpkid.ca_obj.sql_fetch(self.gctx, long(class_name))
- if ca is None:
- raise rpki.exceptions.ClassNameUnknown("Unknown class name %s" % class_name)
- parent = ca.parent
- if self.self_id != parent.self_id:
- raise rpki.exceptions.ClassNameMismatch(
- "Class name mismatch: child.self_id = %d, parent.self_id = %d" % (
- self.self_id, parent.self_id))
- return ca
-
- def serve_destroy_hook(self, cb, eb):
- publisher = rpki.rpkid.publication_queue()
- for child_cert in self.child_certs:
- child_cert.revoke(publisher = publisher,
- generate_crl_and_manifest = True)
- publisher.call_pubd(cb, eb)
-
-
- def up_down_handle_list(self, q_msg, r_msg, callback, errback):
-
- def got_resources(irdb_resources):
-
- if irdb_resources.valid_until < rpki.sundial.now():
- logger.debug("Child %s's resources expired %s", self.child_handle, irdb_resources.valid_until)
- else:
- for parent in self.parents:
- for ca in parent.cas:
- ca_detail = ca.active_ca_detail
- if not ca_detail:
- logger.debug("No active ca_detail, can't issue to %s", self.child_handle)
- continue
- resources = ca_detail.latest_ca_cert.get_3779resources() & irdb_resources
- if resources.empty():
- logger.debug("No overlap between received resources and what child %s should get ([%s], [%s])",
- self.child_handle, ca_detail.latest_ca_cert.get_3779resources(), irdb_resources)
- continue
- rc = SubElement(r_msg, rpki.up_down.tag_class,
- class_name = str(ca.ca_id),
- cert_url = ca_detail.ca_cert_uri,
- resource_set_as = str(resources.asn),
- resource_set_ipv4 = str(resources.v4),
- resource_set_ipv6 = str(resources.v6),
- resource_set_notafter = str(resources.valid_until))
- for child_cert in self.fetch_child_certs(ca_detail = ca_detail):
- c = SubElement(rc, rpki.up_down.tag_certificate, cert_url = child_cert.uri)
- c.text = child_cert.cert.get_Base64()
- SubElement(rc, rpki.up_down.tag_issuer).text = ca_detail.latest_ca_cert.get_Base64()
- callback()
-
- self.gctx.irdb_query_child_resources(self.self.self_handle, self.child_handle, got_resources, errback)
-
-
- def up_down_handle_issue(self, q_msg, r_msg, callback, errback):
-
- def got_resources(irdb_resources):
-
- def done():
- rc = SubElement(r_msg, rpki.up_down.tag_class,
- class_name = class_name,
- cert_url = ca_detail.ca_cert_uri,
- resource_set_as = str(resources.asn),
- resource_set_ipv4 = str(resources.v4),
- resource_set_ipv6 = str(resources.v6),
- resource_set_notafter = str(resources.valid_until))
- c = SubElement(rc, rpki.up_down.tag_certificate, cert_url = child_cert.uri)
- c.text = child_cert.cert.get_Base64()
- SubElement(rc, rpki.up_down.tag_issuer).text = ca_detail.latest_ca_cert.get_Base64()
- callback()
-
- if irdb_resources.valid_until < rpki.sundial.now():
- raise rpki.exceptions.IRDBExpired("IRDB entry for child %s expired %s" % (
- self.child_handle, irdb_resources.valid_until))
-
- resources = irdb_resources & ca_detail.latest_ca_cert.get_3779resources()
- resources.valid_until = irdb_resources.valid_until
- req_key = pkcs10.getPublicKey()
- req_sia = pkcs10.get_SIA()
- child_cert = self.fetch_child_certs(ca_detail = ca_detail, ski = req_key.get_SKI(), unique = True)
-
- # Generate new cert or regenerate old one if necessary
-
- publisher = rpki.rpkid.publication_queue()
-
- if child_cert is None:
- child_cert = ca_detail.issue(
- ca = ca,
- child = self,
- subject_key = req_key,
- sia = req_sia,
- resources = resources,
- publisher = publisher)
- else:
- child_cert = child_cert.reissue(
- ca_detail = ca_detail,
- sia = req_sia,
- resources = resources,
- publisher = publisher)
-
- self.gctx.sql.sweep()
- assert child_cert and child_cert.sql_in_db
- publisher.call_pubd(done, errback)
-
- req = q_msg[0]
- assert req.tag == rpki.up_down.tag_request
-
- # Subsetting not yet implemented, this is the one place where we
- # have to handle it, by reporting that we're lame.
-
- if any(req.get(a) for a in ("req_resource_set_as", "req_resource_set_ipv4", "req_resource_set_ipv6")):
- raise rpki.exceptions.NotImplementedYet("req_* attributes not implemented yet, sorry")
-
- class_name = req.get("class_name")
- pkcs10 = rpki.x509.PKCS10(Base64 = req.text)
- pkcs10.check_valid_request_ca()
- ca = self.ca_from_class_name(class_name)
- ca_detail = ca.active_ca_detail
- if ca_detail is None:
- raise rpki.exceptions.NoActiveCA("No active CA for class %r" % class_name)
-
- self.gctx.irdb_query_child_resources(self.self.self_handle, self.child_handle, got_resources, errback)
-
-
- def up_down_handle_revoke(self, q_msg, r_msg, callback, errback):
-
- def done():
- SubElement(r_msg, key.tag, class_name = class_name, ski = key.get("ski"))
- callback()
-
- key = q_msg[0]
- assert key.tag == rpki.up_down.tag_key
- class_name = key.get("class_name")
- ski = base64.urlsafe_b64decode(key.get("ski") + "=")
-
- publisher = rpki.rpkid.publication_queue()
-
- ca = self.ca_from_class_name(class_name)
- for ca_detail in ca.ca_details:
- for child_cert in self.fetch_child_certs(ca_detail = ca_detail, ski = ski):
- child_cert.revoke(publisher = publisher)
-
- self.gctx.sql.sweep()
- publisher.call_pubd(done, errback)
-
-
- def serve_up_down(self, q_der, callback):
- """
- Outer layer of server handling for one up-down PDU from this child.
- """
-
- def done():
- callback(rpki.up_down.cms_msg().wrap(r_msg, bsc.private_key_id,
- bsc.signing_cert, bsc.signing_cert_crl))
-
- def lose(e):
- logger.exception("Unhandled exception serving child %r", self)
- rpki.up_down.generate_error_response_from_exception(r_msg, e, q_type)
- done()
-
- bsc = self.bsc
- if bsc is None:
- raise rpki.exceptions.BSCNotFound("Could not find BSC %s" % self.bsc_id)
- q_cms = rpki.up_down.cms_msg(DER = q_der)
- q_msg = q_cms.unwrap((self.gctx.bpki_ta,
- self.self.bpki_cert,
- self.self.bpki_glue,
- self.bpki_cert,
- self.bpki_glue))
- q_cms.check_replay_sql(self, "child", self.child_handle)
- q_type = q_msg.get("type")
- logger.info("Serving %s query from child %s [sender %s, recipient %s]",
- q_type, self.child_handle, q_msg.get("sender"), q_msg.get("recipient"))
- if rpki.up_down.enforce_strict_up_down_xml_sender and q_msg.get("sender") != self.child_handle:
- raise rpki.exceptions.BadSender("Unexpected XML sender %s" % q_msg.get("sender"))
- self.gctx.sql.sweep()
-
- r_msg = Element(rpki.up_down.tag_message, nsmap = rpki.up_down.nsmap, version = rpki.up_down.version,
- sender = q_msg.get("recipient"), recipient = q_msg.get("sender"), type = q_type + "_response")
-
- try:
- getattr(self, "up_down_handle_" + q_type)(q_msg, r_msg, done, lose)
- except (rpki.async.ExitNow, SystemExit):
- raise
- except Exception, e:
- lose(e)
-
-
class cms_msg(rpki.x509.XML_CMS_object):
"""
CMS-signed left-right PDU.
diff --git a/rpki/log.py b/rpki/log.py
index 7bad6dc2..0ef9ee5b 100644
--- a/rpki/log.py
+++ b/rpki/log.py
@@ -92,9 +92,18 @@ class Formatter(object):
return "".join(self.coformat(record)).rstrip("\n")
def coformat(self, record):
- if not self.is_syslog:
- yield time.strftime("%Y-%m-%d %H:%M:%S ", time.gmtime(record.created))
- yield "%s[%d]: " % (self.ident, record.process)
+
+ try:
+ if not self.is_syslog:
+ yield time.strftime("%Y-%m-%d %H:%M:%S ", time.gmtime(record.created))
+ except: # pylint: disable=W0702
+ yield "[$!$Time format failed]"
+
+ try:
+ yield "%s[%d]: " % (self.ident, record.process)
+ except: # pylint: disable=W0702
+ yield "[$!$ident format failed]"
+
try:
if isinstance(record.context, (str, unicode)):
yield record.context + " "
@@ -102,16 +111,26 @@ class Formatter(object):
yield repr(record.context) + " "
except AttributeError:
pass
- yield record.getMessage()
- if record.exc_info:
- if self.is_syslog or not enable_tracebacks:
- lines = tb.format_exception_only(record.exc_info[0], record.exc_info[1])
- lines.insert(0, ": ")
- else:
- lines = tb.format_exception(record.exc_info[0], record.exc_info[1], record.exc_info[2])
- lines.insert(0, "\n")
- for line in lines:
- yield line
+ except: # pylint: disable=W0702
+ yield "[$!$context format failed]"
+
+ try:
+ yield record.getMessage()
+ except: # pylint: disable=W0702
+ yield "[$!$record.getMessage() failed]"
+
+ try:
+ if record.exc_info:
+ if self.is_syslog or not enable_tracebacks:
+ lines = tb.format_exception_only(record.exc_info[0], record.exc_info[1])
+ lines.insert(0, ": ")
+ else:
+ lines = tb.format_exception(record.exc_info[0], record.exc_info[1], record.exc_info[2])
+ lines.insert(0, "\n")
+ for line in lines:
+ yield line
+ except: # pylint: disable=W0702
+ yield "[$!$exception formatting failed]"
def argparse_setup(parser, default_thunk = None):
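The coformat() change above wraps each formatting step in its own try/except so that one malformed record attribute degrades to a placeholder instead of killing the whole log line. A minimal standalone sketch of the same idea, using only the stdlib logging module (SafeFormatter, _part() and the placeholder markers are illustrative names, not part of rpki.log):

import logging
import time
import traceback

class SafeFormatter(logging.Formatter):

    def _part(self, thunk, label):
        # Run one formatting step; on any failure, emit a marker instead.
        try:
            return thunk()
        except Exception:
            return "[$!$%s failed]" % label

    def format(self, record):
        pieces = [
            self._part(lambda: time.strftime("%Y-%m-%d %H:%M:%S ",
                                             time.gmtime(record.created)),
                       "time format"),
            self._part(lambda: "%s[%d]: " % (record.name, record.process),
                       "ident format"),
            self._part(record.getMessage, "record.getMessage()")]
        if record.exc_info:
            pieces.append(self._part(
                lambda: "\n" + "".join(traceback.format_exception(*record.exc_info)),
                "exception formatting"))
        return "".join(pieces)

if __name__ == "__main__":
    handler = logging.StreamHandler()
    handler.setFormatter(SafeFormatter())
    logging.getLogger().addHandler(handler)
    logging.getLogger().warning("formatter survives even if one %s blows up", "part")
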
diff --git a/rpki/rpkid.py b/rpki/rpkid.py
index bc13cd9a..619f8650 100644
--- a/rpki/rpkid.py
+++ b/rpki/rpkid.py
@@ -25,7 +25,6 @@ import os
import re
import time
import random
-import base64
import logging
import argparse
@@ -33,7 +32,6 @@ import rpki.resource_set
import rpki.up_down
import rpki.left_right
import rpki.x509
-import rpki.sql
import rpki.http
import rpki.config
import rpki.exceptions
@@ -55,7 +53,8 @@ class main(object):
def __init__(self):
- os.environ["TZ"] = "UTC"
+ os.environ.update(TZ = "UTC",
+ DJANGO_SETTINGS_MODULE = "rpki.django_settings.rpkid")
time.tzset()
self.irdbd_cms_timestamp = None
@@ -105,13 +104,17 @@ class main(object):
if self.profile:
logger.info("Running in profile mode with output to %s", self.profile)
+ logger.debug("Initializing Django")
+
import django
django.setup()
+ logger.debug("Initializing rpkidb...")
+
global rpki # pylint: disable=W0602
import rpki.rpkidb # pylint: disable=W0621
- self.sql = rpki.sql.session(self.cfg)
+ logger.debug("Initializing rpkidb...done")
self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta"))
self.irdb_cert = rpki.x509.X509(Auto_update = self.cfg.get("irdb-cert"))
@@ -287,13 +290,8 @@ class main(object):
def handle_list_published_objects(self, q_pdu, r_msg):
"""
<list_published_objects/> server.
-
- This is written for the old SQL API, will need rewriting once we
- switch rpkid to Django ORM.
"""
- logger.debug(".handle_list_published_objects() %s", ElementToString(q_pdu))
-
self_handle = q_pdu.get("self_handle")
msg_tag = q_pdu.get("tag")
@@ -301,69 +299,49 @@ class main(object):
if msg_tag is not None:
kw.update(tag = msg_tag)
- for parent in rpki.left_right.self_elt.serve_fetch_handle(self, None, self_handle).parents:
- for ca in parent.cas:
- ca_detail = ca.active_ca_detail
- if ca_detail is not None:
-
- SubElement(r_msg, rpki.left_right.tag_list_published_objects,
- uri = ca_detail.crl_uri, **kw).text = ca_detail.latest_crl.get_Base64()
-
- SubElement(r_msg, rpki.left_right.tag_list_published_objects,
- uri = ca_detail.manifest_uri, **kw).text = ca_detail.latest_manifest.get_Base64()
-
- for c in ca_detail.child_certs:
- SubElement(r_msg, rpki.left_right.tag_list_published_objects,
- uri = c.uri, child_handle = c.child.child_handle, **kw).text = c.cert.get_Base64()
-
- for r in ca_detail.roas:
- if r.roa is not None:
- SubElement(r_msg, rpki.left_right.tag_list_published_objects,
- uri = r.uri, **kw).text = r.roa.get_Base64()
-
- for g in ca_detail.ghostbusters:
- SubElement(r_msg, rpki.left_right.tag_list_published_objects,
- uri = g.uri, **kw).text = g.ghostbuster.get_Base64()
-
- for c in ca_detail.ee_certificates:
- SubElement(r_msg, rpki.left_right.tag_list_published_objects,
- uri = c.uri, **kw).text = c.cert.get_Base64()
+ for ca_detail in rpki.rpkidb.models.CADetail.objects.filter(ca__parent__self__self_handle = self_handle, state = "active"):
+ SubElement(r_msg, rpki.left_right.tag_list_published_objects,
+ uri = ca_detail.crl_uri, **kw).text = ca_detail.latest_crl.get_Base64()
+ SubElement(r_msg, rpki.left_right.tag_list_published_objects,
+ uri = ca_detail.manifest_uri, **kw).text = ca_detail.latest_manifest.get_Base64()
+ for c in ca_detail.child_certs.all():
+ SubElement(r_msg, rpki.left_right.tag_list_published_objects,
+ uri = c.uri, child_handle = c.child.child_handle, **kw).text = c.cert.get_Base64()
+ for r in ca_detail.roas.filter(roa__isnull = False):
+ SubElement(r_msg, rpki.left_right.tag_list_published_objects,
+ uri = r.uri, **kw).text = r.roa.get_Base64()
+ for g in ca_detail.ghostbusters.all():
+ SubElement(r_msg, rpki.left_right.tag_list_published_objects,
+ uri = g.uri, **kw).text = g.ghostbuster.get_Base64()
+ for c in ca_detail.ee_certificates.all():
+ SubElement(r_msg, rpki.left_right.tag_list_published_objects,
+ uri = c.uri, **kw).text = c.cert.get_Base64()
def handle_list_received_resources(self, q_pdu, r_msg):
"""
<list_received_resources/> server.
-
- This is written for the old SQL API, will need rewriting once we
- switch rpkid to Django ORM.
"""
logger.debug(".handle_list_received_resources() %s", ElementToString(q_pdu))
-
self_handle = q_pdu.get("self_handle")
msg_tag = q_pdu.get("tag")
-
- for parent in rpki.left_right.self_elt.serve_fetch_handle(self, None, self_handle).parents:
- for ca in parent.cas:
- ca_detail = ca.active_ca_detail
- if ca_detail is not None and ca_detail.latest_ca_cert is not None:
-
- cert = ca_detail.latest_ca_cert
- resources = cert.get_3779resources()
-
- r_pdu = SubElement(r_msg, rpki.left_right.tag_list_received_resources,
- self_handle = self_handle,
- parent_handle = parent.parent_handle,
- uri = ca_detail.ca_cert_uri,
- notBefore = str(cert.getNotBefore()),
- notAfter = str(cert.getNotAfter()),
- sia_uri = cert.get_sia_directory_uri(),
- aia_uri = cert.get_aia_uri(),
- asn = str(resources.asn),
- ipv4 = str(resources.v4),
- ipv6 = str(resources.v6))
-
- if msg_tag is not None:
- r_pdu.set("tag", msg_tag)
+ for ca_detail in rpki.rpkidb.models.CADetail.objects.filter(ca__parent__self__self_handle = self_handle,
+ state = "active", latest_ca_cert__isnull = False):
+ cert = ca_detail.latest_ca_cert
+ resources = cert.get_3779resources()
+ r_pdu = SubElement(r_msg, rpki.left_right.tag_list_received_resources,
+ self_handle = self_handle,
+ parent_handle = ca_detail.ca.parent.parent_handle,
+ uri = ca_detail.ca_cert_uri,
+ notBefore = str(cert.getNotBefore()),
+ notAfter = str(cert.getNotAfter()),
+ sia_uri = cert.get_sia_directory_uri(),
+ aia_uri = cert.get_aia_uri(),
+ asn = str(resources.asn),
+ ipv4 = str(resources.v4),
+ ipv6 = str(resources.v6))
+ if msg_tag is not None:
+ r_pdu.set("tag", msg_tag)
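Both rewritten handlers above follow the same shape: one ORM query filtered across the relationship chain (ca__parent__self__self_handle, state = "active"), then one SubElement() per row of the result, instead of the old hand-written loops over parents, CAs and ca_details. A standalone sketch of the response-building half, with the stdlib ElementTree standing in for lxml and ad hoc Row tuples standing in for the real CADetail queryset:

from collections import namedtuple
from xml.etree.ElementTree import Element, SubElement, tostring

Row = namedtuple("Row", "uri base64")

def list_published_objects(r_msg, rows, self_handle, tag = None):
    # One response PDU per row, all sharing the same self_handle/tag attributes.
    kw = dict(self_handle = self_handle)
    if tag is not None:
        kw.update(tag = tag)
    for row in rows:
        SubElement(r_msg, "list_published_objects",
                   uri = row.uri, **kw).text = row.base64

if __name__ == "__main__":
    r_msg = Element("msg")
    list_published_objects(r_msg,
                           [Row("rsync://example.org/x.crl", "Zm9v"),
                            Row("rsync://example.org/x.mft", "YmFy")],
                           "alice", tag = "42")
    print(tostring(r_msg))
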
def left_right_handler(self, query, path, cb):
@@ -380,6 +358,8 @@ class main(object):
#
# Need to clone logic from rpki.pubd.main.control_handler().
+ logger.debug("Entering left_right_handler()")
+
try:
q_cms = rpki.left_right.cms_msg(DER = query)
q_msg = q_cms.unwrap((self.bpki_ta, self.irbe_cert))
@@ -397,102 +377,63 @@ class main(object):
raise rpki.exceptions.BadQuery("Message type is not query")
def done():
- self.sql.sweep()
cb(200, body = rpki.left_right.cms_msg().wrap(r_msg, self.rpkid_key, self.rpkid_cert))
def loop(iterator, q_pdu):
+ logger.debug("left_right_handler():loop(%r)", q_pdu)
+
def fail(e):
if not isinstance(e, rpki.exceptions.NotFound):
logger.exception("Unhandled exception serving left-right PDU %r", q_pdu)
-          # Compatibility kludge
- if isinstance(q_pdu, rpki.left_right.base_elt):
- error_self_handle = q_pdu.self_handle
- error_tag = q_pdu.tag
- else:
- error_self_handle = q_pdu.get("self_handle")
- error_tag = q_pdu.get("tag")
+ error_self_handle = q_pdu.get("self_handle")
+ error_tag = q_pdu.get("tag")
r_pdu = SubElement(r_msg, rpki.left_right.tag_report_error, error_code = e.__class__.__name__)
r_pdu.text = str(e)
if error_tag is not None:
r_pdu.set("tag", error_tag)
if error_self_handle is not None:
r_pdu.set("self_handle", error_self_handle)
- self.sql.sweep()
cb(200, body = rpki.left_right.cms_msg().wrap(r_msg, self.rpkid_key, self.rpkid_cert))
try:
if q_pdu.tag in self.left_right_trivial_handlers:
+ logger.debug("left_right_handler(): trivial handler")
self.left_right_trivial_handlers[q_pdu.tag](q_pdu, r_msg)
iterator()
- elif True: # Old-style handlers
-
- q_map = { rpki.left_right.tag_self : rpki.left_right.self_elt,
- rpki.left_right.tag_bsc : rpki.left_right.bsc_elt,
- rpki.left_right.tag_parent : rpki.left_right.parent_elt,
- rpki.left_right.tag_child : rpki.left_right.child_elt,
- rpki.left_right.tag_repository : rpki.left_right.repository_elt }
- q_pdu = q_map[q_pdu.tag].fromXML(q_pdu)
- q_pdu.gctx = self
- q_pdu.serve_dispatch(r_msg, iterator, fail)
-
- else: # New-style handlers
-
- # Notes on hooks in old code
- #
- # .serve_pre_save_hook(): used by all classes to do some
- # kind of handle fixup which I think is now OBE. Also
- # used by BSC for key generation, because schema (and
- # corresponding new model) don't allow NULL for private
- # key or PKCS10 request, so either we have to relax the
- # schema constraint or generate key before saving.
- # (bsc)
- #
- # .serve_destroy_hook(): used by several objects to
- # trigger revocation of related objects. Will probably
- # need to preserve this behavior.
- # (self, parent, child)
- #
- # .serve_post_save_hook(): used to trigger various actions
- # based on boolean attributes in XML.
- # (self, repository, parent, child)
-
+ else:
action = q_pdu.get("action")
model = self.left_right_models[q_pdu.tag]
+ logger.debug("left_right_handler(): action %s model %r", action, model)
+
if action in ("get", "list"):
+ logger.debug("left_right_handler(): get/list")
for obj in model.objects.xml_list(q_pdu):
- obj.xml_template.encode(obj, r_msg)
+ logger.debug("left_right_handler(): get/list: encoding %r", obj)
+ obj.xml_template.encode(obj, q_pdu, r_msg)
+ iterator()
elif action == "destroy":
+ def destroy_cb():
+ obj.delete()
+ obj.xml_template.acknowledge(obj, q_pdu, r_msg)
+ iterator()
+ logger.debug("left_right_handler(): destroy")
obj = model.objects.xml_get_for_delete(q_pdu)
- try:
- hook = obj.xml_pre_delete_hook
- except AttributeError:
- pass
- else:
- hook()
- obj.delete()
- obj.xml_template.acknowledge(obj, q_pdu, r_msg)
+ obj.xml_pre_delete_hook(self, destroy_cb, fail)
elif action in ("create", "set"):
+ def create_set_cb():
+ obj.xml_template.acknowledge(obj, q_pdu, r_msg)
+ iterator()
+ logger.debug("left_right_handler(): create/set")
obj = model.objects.xml_get_or_create(q_pdu)
obj.xml_template.decode(obj, q_pdu)
- try:
- hook = obj.xml_pre_save_hook
- except AttributeError:
- pass
- else:
- hook(q_pdu)
+ obj.xml_pre_save_hook(q_pdu)
obj.save()
- try:
- hook = obj.xml_post_save_hook
- except AttributeError:
- pass
- else:
- hook(q_pdu)
- obj.xml_template.acknowledge(obj, q_pdu, r_msg)
+ obj.xml_post_save_hook(self, q_pdu, create_set_cb, fail)
else:
raise rpki.exceptions.BadQuery
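The dispatch above replaces per-class serve_dispatch() methods with a single table of models keyed by XML tag and one code path per action (get/list, destroy, create/set). A toy sketch of that shape, with ToyModel and acknowledge standing in for the real models and xml_template machinery, and the pre/post-save and pre-delete hook plumbing left out:

class ToyModel(object):

    objects = {}                        # handle -> ToyModel

    def __init__(self, handle):
        self.handle = handle

    def save(self):
        ToyModel.objects[self.handle] = self

    def delete(self):
        del ToyModel.objects[self.handle]

def dispatch(action, handle, acknowledge):
    # One code path per action, instead of one serve_dispatch() per class.
    if action in ("get", "list"):
        for obj in ToyModel.objects.values():
            acknowledge(obj)
    elif action == "destroy":
        obj = ToyModel.objects[handle]
        obj.delete()
        acknowledge(obj)
    elif action in ("create", "set"):
        obj = ToyModel.objects.get(handle) or ToyModel(handle)
        obj.save()
        acknowledge(obj)
    else:
        raise ValueError("Unknown action %r" % action)

if __name__ == "__main__":
    dispatch("create", "alice", lambda obj: None)
    dispatch("list", None, lambda obj: None)
    dispatch("destroy", "alice", lambda obj: None)
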
@@ -519,7 +460,6 @@ class main(object):
"""
def done(r_der):
- self.sql.sweep()
cb(200, body = r_der)
try:
@@ -527,15 +467,12 @@ class main(object):
if match is None:
raise rpki.exceptions.BadContactURL("Bad URL path received in up_down_handler(): %s" % path)
self_handle, child_handle = match.groups()
- child = rpki.left_right.child_elt.sql_fetch_where1(
- gctx = self,
- where = "self.self_handle = %s AND child.child_handle = %s AND child.self_id = self.self_id",
- args = (self_handle, child_handle),
- also_from = "self")
- if child is None:
+ try:
+ child = rpki.rpkidb.models.Child.objects.get(self__self_handle = self_handle, child_handle = child_handle)
+ except rpki.rpkidb.models.Child.DoesNotExist:
raise rpki.exceptions.ChildNotFound("Could not find child %s of self %s in up_down_handler()" % (
child_handle, self_handle))
- child.serve_up_down(q_der, done)
+ child.serve_up_down(self, q_der, done)
except (rpki.async.ExitNow, SystemExit):
raise
except (rpki.exceptions.ChildNotFound, rpki.exceptions.BadContactURL), e:
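The child lookup above is the usual ORM idiom: call objects.get() and translate the model's DoesNotExist into the protocol-level ChildNotFound error the caller expects. A self-contained sketch of that translation, with an in-memory registry and stand-in exception classes instead of Django and rpki.exceptions:

class DoesNotExist(Exception):
    "Stand-in for the ORM's per-model DoesNotExist."

class ChildNotFound(Exception):
    "Stand-in for rpki.exceptions.ChildNotFound."

CHILDREN = {("alice", "bob"): "child-object"}

def get_child(self_handle, child_handle):
    try:
        return CHILDREN[(self_handle, child_handle)]
    except KeyError:
        raise DoesNotExist()

def up_down_lookup(self_handle, child_handle):
    try:
        return get_child(self_handle, child_handle)
    except DoesNotExist:
        raise ChildNotFound("Could not find child %s of self %s" % (child_handle, self_handle))

if __name__ == "__main__":
    print(up_down_lookup("alice", "bob"))
    try:
        up_down_lookup("alice", "carol")
    except ChildNotFound as e:
        print(e)
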
@@ -599,7 +536,6 @@ class main(object):
logger.debug("Starting cron run")
def done():
- self.sql.sweep()
self.cron_timeout = None
logger.info("Finished cron run started at %s", now)
if cb is not None:
@@ -607,12 +543,12 @@ class main(object):
completion = rpki.rpkid_tasks.CompletionHandler(done)
try:
- selves = rpki.left_right.self_elt.sql_fetch_all(self)
+ selves = rpki.rpkidb.models.Self.objects.all()
except Exception:
- logger.exception("Error pulling self_elts from SQL, maybe SQL server is down?")
+ logger.exception("Error pulling selves from SQL, maybe SQL server is down?")
else:
for s in selves:
- s.schedule_cron_tasks(completion)
+ s.schedule_cron_tasks(self, completion)
nothing_queued = completion.count == 0
assert self.use_internal_cron or self.cron_timeout is None
@@ -652,2110 +588,6 @@ class main(object):
logger.debug("Starting externally triggered cron")
self.cron(done)
-class ca_obj(rpki.sql.sql_persistent):
- """
- Internal CA object.
- """
-
- sql_template = rpki.sql.template(
- "ca",
- "ca_id",
- "last_crl_sn",
- ("next_crl_update", rpki.sundial.datetime),
- "last_issued_sn",
- "last_manifest_sn",
- ("next_manifest_update", rpki.sundial.datetime),
- "sia_uri",
- "parent_id",
- "parent_resource_class")
-
- last_crl_sn = 0
- last_issued_sn = 0
- last_manifest_sn = 0
-
- def __repr__(self):
- return rpki.log.log_repr(self, repr(self.parent), self.parent_resource_class)
-
- @property
- @rpki.sql.cache_reference
- def parent(self):
- """
- Fetch parent object to which this CA object links.
- """
-
- return rpki.left_right.parent_elt.sql_fetch(self.gctx, self.parent_id)
-
- @property
- def ca_details(self):
- """
- Fetch all ca_detail objects that link to this CA object.
- """
-
- return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s", (self.ca_id,))
-
- @property
- def pending_ca_details(self):
- """
- Fetch the pending ca_details for this CA, if any.
- """
-
- return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'pending'", (self.ca_id,))
-
- @property
- def active_ca_detail(self):
- """
- Fetch the active ca_detail for this CA, if any.
- """
-
- return ca_detail_obj.sql_fetch_where1(self.gctx, "ca_id = %s AND state = 'active'", (self.ca_id,))
-
- @property
- def deprecated_ca_details(self):
- """
- Fetch deprecated ca_details for this CA, if any.
- """
-
- return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'deprecated'", (self.ca_id,))
-
- @property
- def active_or_deprecated_ca_details(self):
- """
- Fetch active and deprecated ca_details for this CA, if any.
- """
-
- return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND (state = 'active' OR state = 'deprecated')", (self.ca_id,))
-
- @property
- def revoked_ca_details(self):
- """
- Fetch revoked ca_details for this CA, if any.
- """
-
- return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'revoked'", (self.ca_id,))
-
- @property
- def issue_response_candidate_ca_details(self):
- """
- Fetch ca_details which are candidates for consideration when
- processing an up-down issue_response PDU.
- """
-
- return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state != 'revoked'", (self.ca_id,))
-
- def construct_sia_uri(self, parent, rc):
- """
- Construct the sia_uri value for this CA given configured
- information and the parent's up-down protocol list_response PDU.
- """
-
- sia_uri = rc.get("suggested_sia_head", "")
- if not sia_uri.startswith("rsync://") or not sia_uri.startswith(parent.sia_base):
- sia_uri = parent.sia_base
- if not sia_uri.endswith("/"):
- raise rpki.exceptions.BadURISyntax("SIA URI must end with a slash: %s" % sia_uri)
- return sia_uri
-
- def check_for_updates(self, parent, rc, cb, eb):
- """
-    Parent has signaled continued existence of a resource class we
- already knew about, so we need to check for an updated
- certificate, changes in resource coverage, revocation and reissue
- with the same key, etc.
- """
-
- sia_uri = self.construct_sia_uri(parent, rc)
- sia_uri_changed = self.sia_uri != sia_uri
- if sia_uri_changed:
- logger.debug("SIA changed: was %s now %s", self.sia_uri, sia_uri)
- self.sia_uri = sia_uri
- self.sql_mark_dirty()
-
- class_name = rc.get("class_name")
-
- rc_resources = rpki.resource_set.resource_bag(
- rc.get("resource_set_as"),
- rc.get("resource_set_ipv4"),
- rc.get("resource_set_ipv6"),
- rc.get("resource_set_notafter"))
-
- cert_map = {}
- for c in rc.getiterator(rpki.up_down.tag_certificate):
- x = rpki.x509.X509(Base64 = c.text)
- u = rpki.up_down.multi_uri(c.get("cert_url")).rsync()
- cert_map[x.gSKI()] = (x, u)
-
- def loop(iterator, ca_detail):
-
- self.gctx.checkpoint()
-
- rc_cert, rc_cert_uri = cert_map.pop(ca_detail.public_key.gSKI(), (None, None))
-
- if rc_cert is None:
-
- logger.warning("SKI %s in resource class %s is in database but missing from list_response to %s from %s, "
- "maybe parent certificate went away?",
- ca_detail.public_key.gSKI(), class_name, parent.self.self_handle, parent.parent_handle)
- publisher = publication_queue()
- ca_detail.destroy(ca = ca_detail.ca, publisher = publisher)
- return publisher.call_pubd(iterator, eb)
-
- else:
-
- if ca_detail.state == "active" and ca_detail.ca_cert_uri != rc_cert_uri:
- logger.debug("AIA changed: was %s now %s", ca_detail.ca_cert_uri, rc_cert_uri)
- ca_detail.ca_cert_uri = rc_cert_uri
- ca_detail.sql_mark_dirty()
-
- if ca_detail.state in ("pending", "active"):
-
- if ca_detail.state == "pending":
- current_resources = rpki.resource_set.resource_bag()
- else:
- current_resources = ca_detail.latest_ca_cert.get_3779resources()
-
- if (ca_detail.state == "pending" or
- sia_uri_changed or
- ca_detail.latest_ca_cert != rc_cert or
- ca_detail.latest_ca_cert.getNotAfter() != rc_resources.valid_until or
- current_resources.undersized(rc_resources) or
- current_resources.oversized(rc_resources)):
- return ca_detail.update(
- parent = parent,
- ca = self,
- rc = rc,
- sia_uri_changed = sia_uri_changed,
- old_resources = current_resources,
- callback = iterator,
- errback = eb)
-
- iterator()
-
- def done():
- if cert_map:
- logger.warning("Unknown certificate SKI%s %s in resource class %s in list_response to %s from %s, maybe you want to \"revoke_forgotten\"?",
- "" if len(cert_map) == 1 else "s", ", ".join(cert_map), class_name, parent.self.self_handle, parent.parent_handle)
- self.gctx.sql.sweep()
- self.gctx.checkpoint()
- cb()
-
- ca_details = self.issue_response_candidate_ca_details
-
- if True:
- skis_parent = set(cert_map)
- skis_me = set(x.latest_ca_cert.gSKI()
- for x in ca_details
- if x.latest_ca_cert is not None)
- for ski in skis_parent & skis_me:
- logger.debug("Parent %s agrees that %s has SKI %s in resource class %s",
- parent.parent_handle, parent.self.self_handle, ski, class_name)
- for ski in skis_parent - skis_me:
- logger.debug("Parent %s thinks %s has SKI %s in resource class %s but I don't think so",
- parent.parent_handle, parent.self.self_handle, ski, class_name)
- for ski in skis_me - skis_parent:
- logger.debug("I think %s has SKI %s in resource class %s but parent %s doesn't think so",
- parent.self.self_handle, ski, class_name, parent.parent_handle)
-
- if ca_details:
- rpki.async.iterator(ca_details, loop, done)
- else:
- logger.warning("Existing resource class %s to %s from %s with no certificates, rekeying",
- class_name, parent.self.self_handle, parent.parent_handle)
- self.gctx.checkpoint()
- self.rekey(cb, eb)
-
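The cross-check in done() above is just a three-way comparison of two SKI sets: the keys the parent listed for this resource class versus the keys we think we hold. The same comparison as a standalone sketch (the SKI strings and class name below are made up):

import logging

logging.basicConfig(level = logging.DEBUG, format = "%(message)s")
logger = logging.getLogger(__name__)

def compare_skis(skis_parent, skis_me, class_name):
    for ski in skis_parent & skis_me:
        logger.debug("Parent and I agree on SKI %s in class %s", ski, class_name)
    for ski in skis_parent - skis_me:
        logger.debug("Parent lists SKI %s in class %s but I don't have it", ski, class_name)
    for ski in skis_me - skis_parent:
        logger.debug("I have SKI %s in class %s but parent didn't list it", ski, class_name)

if __name__ == "__main__":
    compare_skis(set(["aaa", "bbb"]), set(["bbb", "ccc"]), "12")
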
- # Called from exactly one place, in rpki.rpkid_tasks.PollParentTask.class_loop().
- # Probably want to refactor.
- @classmethod
- def create(cls, parent, rc, cb, eb):
- """
-    Parent has signaled existence of a new resource class, so we need
- to create and set up a corresponding CA object.
- """
-
- self = cls()
- self.gctx = parent.gctx
- self.parent_id = parent.parent_id
- self.parent_resource_class = rc.get("class_name")
- self.sql_store()
- try:
- self.sia_uri = self.construct_sia_uri(parent, rc)
- except rpki.exceptions.BadURISyntax:
- self.sql_delete()
- raise
- ca_detail = ca_detail_obj.create(self)
-
- def done(r_msg):
- c = r_msg[0][0]
- logger.debug("CA %r received certificate %s", self, c.get("cert_url"))
- ca_detail.activate(
- ca = self,
- cert = rpki.x509.X509(Base64 = c.text),
- uri = c.get("cert_url"),
- callback = cb,
- errback = eb)
-
- logger.debug("Sending issue request to %r from %r", parent, self.create)
- parent.up_down_issue_query(self, ca_detail, done, eb)
-
- def destroy(self, parent, callback):
- """
- The list of current resource classes received from parent does not
- include the class corresponding to this CA, so we need to delete
- it (and its little dog too...).
-
- All certs published by this CA are now invalid, so need to
- withdraw them, the CRL, and the manifest from the repository,
- delete all child_cert and ca_detail records associated with this
- CA, then finally delete this CA itself.
- """
-
- def lose(e):
- logger.exception("Could not delete CA %r, skipping", self)
- callback()
-
- def done():
- logger.debug("Deleting %r", self)
- self.sql_delete()
- callback()
-
- publisher = publication_queue()
- for ca_detail in self.ca_details:
- ca_detail.destroy(ca = self, publisher = publisher, allow_failure = True)
- publisher.call_pubd(done, lose)
-
- def next_serial_number(self):
- """
- Allocate a certificate serial number.
- """
-
- self.last_issued_sn += 1
- self.sql_mark_dirty()
- return self.last_issued_sn
-
- def next_manifest_number(self):
- """
- Allocate a manifest serial number.
- """
-
- self.last_manifest_sn += 1
- self.sql_mark_dirty()
- return self.last_manifest_sn
-
- def next_crl_number(self):
- """
- Allocate a CRL serial number.
- """
-
- self.last_crl_sn += 1
- self.sql_mark_dirty()
- return self.last_crl_sn
-
- def rekey(self, cb, eb):
- """
- Initiate a rekey operation for this ca. Generate a new keypair.
- Request cert from parent using new keypair. Mark result as our
- active ca_detail. Reissue all child certs issued by this ca using
- the new ca_detail.
- """
-
- parent = self.parent
- old_detail = self.active_ca_detail
- new_detail = ca_detail_obj.create(self)
-
- def done(r_msg):
- c = r_msg[0][0]
- logger.debug("CA %r received certificate %s", self, c.get("cert_url"))
- new_detail.activate(
- ca = self,
- cert = rpki.x509.X509(Base64 = c.text),
- uri = c.get("cert_url"),
- predecessor = old_detail,
- callback = cb,
- errback = eb)
-
- logger.debug("Sending issue request to %r from %r", parent, self.rekey)
- parent.up_down_issue_query(self, new_detail, done, eb)
-
- def revoke(self, cb, eb, revoke_all = False):
- """
- Revoke deprecated ca_detail objects associated with this CA, or
- all ca_details associated with this CA if revoke_all is set.
- """
-
- def loop(iterator, ca_detail):
- ca_detail.revoke(cb = iterator, eb = eb)
-
- ca_details = self.ca_details if revoke_all else self.deprecated_ca_details
-
- rpki.async.iterator(ca_details, loop, cb)
-
- def reissue(self, cb, eb):
- """
- Reissue all current certificates issued by this CA.
- """
-
- ca_detail = self.active_ca_detail
- if ca_detail:
- ca_detail.reissue(cb, eb)
- else:
- cb()
-
-class ca_detail_obj(rpki.sql.sql_persistent):
- """
- Internal CA detail object.
- """
-
- sql_template = rpki.sql.template(
- "ca_detail",
- "ca_detail_id",
- ("private_key_id", rpki.x509.RSA),
- ("public_key", rpki.x509.PublicKey),
- ("latest_ca_cert", rpki.x509.X509),
- ("manifest_private_key_id", rpki.x509.RSA),
- ("manifest_public_key", rpki.x509.PublicKey),
- ("latest_manifest_cert", rpki.x509.X509),
- ("latest_manifest", rpki.x509.SignedManifest),
- ("latest_crl", rpki.x509.CRL),
- ("crl_published", rpki.sundial.datetime),
- ("manifest_published", rpki.sundial.datetime),
- "state",
- "ca_cert_uri",
- "ca_id")
-
- crl_published = None
- manifest_published = None
- latest_ca_cert = None
- latest_crl = None
- latest_manifest = None
- ca_cert_uri = None
-
- def __repr__(self):
- return rpki.log.log_repr(self, repr(self.ca), self.state, self.ca_cert_uri)
-
- def sql_decode(self, vals):
- """
- Extra assertions for SQL decode of a ca_detail_obj.
- """
-
- rpki.sql.sql_persistent.sql_decode(self, vals)
- assert self.public_key is None or self.private_key_id is None or self.public_key.get_DER() == self.private_key_id.get_public_DER()
- assert self.manifest_public_key is None or self.manifest_private_key_id is None or self.manifest_public_key.get_DER() == self.manifest_private_key_id.get_public_DER()
-
- @property
- @rpki.sql.cache_reference
- def ca(self):
- """
- Fetch CA object to which this ca_detail links.
- """
-
- return ca_obj.sql_fetch(self.gctx, self.ca_id)
-
- def fetch_child_certs(self, child = None, ski = None, unique = False, unpublished = None):
- """
- Fetch all child_cert objects that link to this ca_detail.
- """
-
- return rpki.rpkid.child_cert_obj.fetch(self.gctx, child, self, ski, unique, unpublished)
-
- @property
- def child_certs(self):
- """
- Fetch all child_cert objects that link to this ca_detail.
- """
-
- return self.fetch_child_certs()
-
- def unpublished_child_certs(self, when):
- """
- Fetch all unpublished child_cert objects linked to this ca_detail
- with attempted publication dates older than when.
- """
-
- return self.fetch_child_certs(unpublished = when)
-
- @property
- def revoked_certs(self):
- """
- Fetch all revoked_cert objects that link to this ca_detail.
- """
-
- return revoked_cert_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,))
-
- @property
- def roas(self):
- """
- Fetch all ROA objects that link to this ca_detail.
- """
-
- return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,))
-
- def unpublished_roas(self, when):
- """
- Fetch all unpublished ROA objects linked to this ca_detail with
- attempted publication dates older than when.
- """
-
- return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s AND published IS NOT NULL and published < %s",
- (self.ca_detail_id, when))
-
- @property
- def ghostbusters(self):
- """
- Fetch all Ghostbuster objects that link to this ca_detail.
- """
-
- return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,))
-
- def unpublished_ghostbusters(self, when):
- """
- Fetch all unpublished Ghostbusters objects linked to this
- ca_detail with attempted publication dates older than when.
- """
-
- return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx,
- "ca_detail_id = %s AND published IS NOT NULL and published < %s",
- (self.ca_detail_id, when))
-
- @property
- def ee_certificates(self):
- """
- Fetch all EE certificate objects that link to this ca_detail.
- """
-
- return rpki.rpkid.ee_cert_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,))
-
- def unpublished_ee_certificates(self, when):
- """
- Fetch all unpublished EE certificate objects linked to this
- ca_detail with attempted publication dates older than when.
- """
-
- return rpki.rpkid.ee_cert_obj.sql_fetch_where(self.gctx,
- "ca_detail_id = %s AND published IS NOT NULL and published < %s",
- (self.ca_detail_id, when))
-
- @property
- def crl_uri(self):
- """
- Return publication URI for this ca_detail's CRL.
- """
-
- return self.ca.sia_uri + self.crl_uri_tail
-
- @property
- def crl_uri_tail(self):
- """
- Return tail (filename portion) of publication URI for this ca_detail's CRL.
- """
-
- return self.public_key.gSKI() + ".crl"
-
- @property
- def manifest_uri(self):
- """
- Return publication URI for this ca_detail's manifest.
- """
-
- return self.ca.sia_uri + self.public_key.gSKI() + ".mft"
-
- def has_expired(self):
- """
- Return whether this ca_detail's certificate has expired.
- """
-
- return self.latest_ca_cert.getNotAfter() <= rpki.sundial.now()
-
- def covers(self, target):
- """
- Test whether this ca_detail covers a given set of resources.
- """
-
- assert not target.asn.inherit and not target.v4.inherit and not target.v6.inherit
- me = self.latest_ca_cert.get_3779resources()
- return target.asn <= me.asn and target.v4 <= me.v4 and target.v6 <= me.v6
-
- def activate(self, ca, cert, uri, callback, errback, predecessor = None):
- """
- Activate this ca_detail.
- """
-
- publisher = publication_queue()
-
- self.latest_ca_cert = cert
- self.ca_cert_uri = uri
- self.generate_manifest_cert()
- self.state = "active"
- self.generate_crl(publisher = publisher)
- self.generate_manifest(publisher = publisher)
- self.sql_store()
-
- if predecessor is not None:
- predecessor.state = "deprecated"
- predecessor.sql_store()
- for child_cert in predecessor.child_certs:
- child_cert.reissue(ca_detail = self, publisher = publisher)
- for roa in predecessor.roas:
- roa.regenerate(publisher = publisher)
- for ghostbuster in predecessor.ghostbusters:
- ghostbuster.regenerate(publisher = publisher)
- predecessor.generate_crl(publisher = publisher)
- predecessor.generate_manifest(publisher = publisher)
-
- publisher.call_pubd(callback, errback)
-
- def destroy(self, ca, publisher, allow_failure = False):
- """
- Delete this ca_detail and all of the certs it issued.
-
- If allow_failure is true, we clean up as much as we can but don't
- raise an exception.
- """
-
- repository = ca.parent.repository
- handler = False if allow_failure else None
- for child_cert in self.child_certs:
- publisher.queue(uri = child_cert.uri,
- old_obj = child_cert.cert,
- repository = repository,
- handler = handler)
- child_cert.sql_mark_deleted()
- for roa in self.roas:
- roa.revoke(publisher = publisher, allow_failure = allow_failure, fast = True)
- for ghostbuster in self.ghostbusters:
- ghostbuster.revoke(publisher = publisher, allow_failure = allow_failure, fast = True)
- try:
- latest_manifest = self.latest_manifest
- except AttributeError:
- latest_manifest = None
- if latest_manifest is not None:
- publisher.queue(uri = self.manifest_uri,
- old_obj = self.latest_manifest,
- repository = repository,
- handler = handler)
- try:
- latest_crl = self.latest_crl
- except AttributeError:
- latest_crl = None
- if latest_crl is not None:
- publisher.queue(uri = self.crl_uri,
- old_obj = self.latest_crl,
- repository = repository,
- handler = handler)
- self.gctx.sql.sweep()
- for cert in self.revoked_certs: # + self.child_certs
- logger.debug("Deleting %r", cert)
- cert.sql_delete()
- logger.debug("Deleting %r", self)
- self.sql_delete()
-
- def revoke(self, cb, eb):
- """
- Request revocation of all certificates whose SKI matches the key
- for this ca_detail.
-
- Tasks:
-
- - Request revocation of old keypair by parent.
-
- - Revoke all child certs issued by the old keypair.
-
- - Generate a final CRL, signed with the old keypair, listing all
- the revoked certs, with a next CRL time after the last cert or
- CRL signed by the old keypair will have expired.
-
- - Generate a corresponding final manifest.
-
- - Destroy old keypairs.
-
- - Leave final CRL and manifest in place until their nextupdate
- time has passed.
- """
-
- ca = self.ca
- parent = ca.parent
- class_name = ca.parent_resource_class
- gski = self.latest_ca_cert.gSKI()
-
- def parent_revoked(r_msg):
-
- if r_msg[0].get("class_name") != class_name:
- raise rpki.exceptions.ResourceClassMismatch
-
- if r_msg[0].get("ski") != gski:
- raise rpki.exceptions.SKIMismatch
-
- logger.debug("Parent revoked %s, starting cleanup", gski)
-
- crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval)
-
- nextUpdate = rpki.sundial.now()
-
- if self.latest_manifest is not None:
- self.latest_manifest.extract_if_needed()
- nextUpdate = nextUpdate.later(self.latest_manifest.getNextUpdate())
-
- if self.latest_crl is not None:
- nextUpdate = nextUpdate.later(self.latest_crl.getNextUpdate())
-
- publisher = publication_queue()
-
- for child_cert in self.child_certs:
- nextUpdate = nextUpdate.later(child_cert.cert.getNotAfter())
- child_cert.revoke(publisher = publisher)
-
- for roa in self.roas:
- nextUpdate = nextUpdate.later(roa.cert.getNotAfter())
- roa.revoke(publisher = publisher)
-
- for ghostbuster in self.ghostbusters:
- nextUpdate = nextUpdate.later(ghostbuster.cert.getNotAfter())
- ghostbuster.revoke(publisher = publisher)
-
- nextUpdate += crl_interval
- self.generate_crl(publisher = publisher, nextUpdate = nextUpdate)
- self.generate_manifest(publisher = publisher, nextUpdate = nextUpdate)
- self.private_key_id = None
- self.manifest_private_key_id = None
- self.manifest_public_key = None
- self.latest_manifest_cert = None
- self.state = "revoked"
- self.sql_mark_dirty()
- publisher.call_pubd(cb, eb)
-
- logger.debug("Asking parent to revoke CA certificate %s", gski)
- parent.up_down_revoke_query(class_name, gski, parent_revoked, eb)
-
-
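revoke() above must leave a final CRL and manifest in place until everything signed by the old key has expired, so nextUpdate starts at now, is pushed later by every notAfter and nextUpdate it encounters, and then gets one more crl_interval added on top. A plain-datetime sketch of that computation (stdlib datetimes stand in for rpki.sundial; the sample dates are arbitrary):

from datetime import datetime, timedelta

def final_next_update(expirations, crl_interval_seconds):
    # Latest expiration among the old key's products, plus one CRL interval.
    next_update = datetime.utcnow()
    for when in expirations:
        if when > next_update:
            next_update = when
    return next_update + timedelta(seconds = crl_interval_seconds)

if __name__ == "__main__":
    print(final_next_update([datetime(2026, 1, 1), datetime(2025, 6, 1)], 6 * 60 * 60))
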
- def update(self, parent, ca, rc, sia_uri_changed, old_resources, callback, errback):
- """
- Need to get a new certificate for this ca_detail and perhaps frob
- children of this ca_detail.
- """
-
- def issued(r_msg):
- c = r_msg[0][0]
- cert = rpki.x509.X509(Base64 = c.text)
- cert_url = c.get("cert_url")
-
- logger.debug("CA %r received certificate %s", self, cert_url)
-
- if self.state == "pending":
- return self.activate(
- ca = ca,
- cert = cert,
- uri = cert_url,
- callback = callback,
- errback = errback)
-
- validity_changed = self.latest_ca_cert is None or self.latest_ca_cert.getNotAfter() != cert.getNotAfter()
-
- publisher = publication_queue()
-
- if self.latest_ca_cert != cert:
- self.latest_ca_cert = cert
- self.sql_mark_dirty()
- self.generate_manifest_cert()
- self.generate_crl(publisher = publisher)
- self.generate_manifest(publisher = publisher)
-
- new_resources = self.latest_ca_cert.get_3779resources()
-
- if sia_uri_changed or old_resources.oversized(new_resources):
- for child_cert in self.child_certs:
- child_resources = child_cert.cert.get_3779resources()
- if sia_uri_changed or child_resources.oversized(new_resources):
- child_cert.reissue(
- ca_detail = self,
- resources = child_resources & new_resources,
- publisher = publisher)
-
- if sia_uri_changed or validity_changed or old_resources.oversized(new_resources):
- for roa in self.roas:
- roa.update(publisher = publisher, fast = True)
-
- if sia_uri_changed or validity_changed:
- for ghostbuster in self.ghostbusters:
- ghostbuster.update(publisher = publisher, fast = True)
-
- publisher.call_pubd(callback, errback)
-
- logger.debug("Sending issue request to %r from %r", parent, self.update)
- parent.up_down_issue_query(ca, self, issued, errback)
-
-
- @classmethod
- def create(cls, ca):
- """
- Create a new ca_detail object for a specified CA.
- """
- self = cls()
- self.gctx = ca.gctx
- self.ca_id = ca.ca_id
- self.state = "pending"
-
- self.private_key_id = rpki.x509.RSA.generate()
- self.public_key = self.private_key_id.get_public()
-
- self.manifest_private_key_id = rpki.x509.RSA.generate()
- self.manifest_public_key = self.manifest_private_key_id.get_public()
-
- self.sql_store()
- return self
-
- def issue_ee(self, ca, resources, subject_key, sia,
- cn = None, sn = None, notAfter = None, eku = None):
- """
- Issue a new EE certificate.
- """
-
- if notAfter is None:
- notAfter = self.latest_ca_cert.getNotAfter()
-
- return self.latest_ca_cert.issue(
- keypair = self.private_key_id,
- subject_key = subject_key,
- serial = ca.next_serial_number(),
- sia = sia,
- aia = self.ca_cert_uri,
- crldp = self.crl_uri,
- resources = resources,
- notAfter = notAfter,
- is_ca = False,
- cn = cn,
- sn = sn,
- eku = eku)
-
- def generate_manifest_cert(self):
- """
- Generate a new manifest certificate for this ca_detail.
- """
-
- resources = rpki.resource_set.resource_bag.from_inheritance()
- self.latest_manifest_cert = self.issue_ee(
- ca = self.ca,
- resources = resources,
- subject_key = self.manifest_public_key,
- sia = (None, None, self.manifest_uri, self.ca.parent.repository.rrdp_notification_uri))
-
- def issue(self, ca, child, subject_key, sia, resources, publisher, child_cert = None):
- """
- Issue a new certificate to a child. Optional child_cert argument
- specifies an existing child_cert object to update in place; if not
- specified, we create a new one. Returns the child_cert object
- containing the newly issued cert.
- """
-
- self.check_failed_publication(publisher)
-
- assert child_cert is None or child_cert.child_id == child.child_id
-
- cert = self.latest_ca_cert.issue(
- keypair = self.private_key_id,
- subject_key = subject_key,
- serial = ca.next_serial_number(),
- aia = self.ca_cert_uri,
- crldp = self.crl_uri,
- sia = sia,
- resources = resources,
- notAfter = resources.valid_until)
-
- if child_cert is None:
- old_cert = None
- child_cert = rpki.rpkid.child_cert_obj(
- gctx = child.gctx,
- child_id = child.child_id,
- ca_detail_id = self.ca_detail_id,
- cert = cert)
- logger.debug("Created new child_cert %r", child_cert)
- else:
- old_cert = child_cert.cert
- child_cert.cert = cert
- del child_cert.ca_detail
- child_cert.ca_detail_id = self.ca_detail_id
- logger.debug("Reusing existing child_cert %r", child_cert)
-
- child_cert.ski = cert.get_SKI()
- child_cert.published = rpki.sundial.now()
- child_cert.sql_store()
- publisher.queue(
- uri = child_cert.uri,
- old_obj = old_cert,
- new_obj = child_cert.cert,
- repository = ca.parent.repository,
- handler = child_cert.published_callback)
- self.generate_manifest(publisher = publisher)
- return child_cert
-
- def generate_crl(self, publisher, nextUpdate = None):
- """
- Generate a new CRL for this ca_detail. At the moment this is
- unconditional, that is, it is up to the caller to decide whether a
- new CRL is needed.
- """
-
- self.check_failed_publication(publisher)
-
- ca = self.ca
- parent = ca.parent
- crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval)
- now = rpki.sundial.now()
-
- if nextUpdate is None:
- nextUpdate = now + crl_interval
-
- certlist = []
- for revoked_cert in self.revoked_certs:
- if now > revoked_cert.expires + crl_interval:
- revoked_cert.sql_delete()
- else:
- certlist.append((revoked_cert.serial, revoked_cert.revoked))
- certlist.sort()
-
- old_crl = self.latest_crl
-
- self.latest_crl = rpki.x509.CRL.generate(
- keypair = self.private_key_id,
- issuer = self.latest_ca_cert,
- serial = ca.next_crl_number(),
- thisUpdate = now,
- nextUpdate = nextUpdate,
- revokedCertificates = certlist)
-
- self.crl_published = rpki.sundial.now()
- self.sql_mark_dirty()
- publisher.queue(
- uri = self.crl_uri,
- old_obj = old_crl,
- new_obj = self.latest_crl,
- repository = parent.repository,
- handler = self.crl_published_callback)
-
- def crl_published_callback(self, pdu):
- """
- Check result of CRL publication.
- """
-
- rpki.publication.raise_if_error(pdu)
- self.crl_published = None
- self.sql_mark_dirty()
-
- def generate_manifest(self, publisher, nextUpdate = None):
- """
- Generate a new manifest for this ca_detail.
- """
-
- self.check_failed_publication(publisher)
-
- ca = self.ca
- parent = ca.parent
- crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval)
- now = rpki.sundial.now()
- uri = self.manifest_uri
-
- if nextUpdate is None:
- nextUpdate = now + crl_interval
-
- if (self.latest_manifest_cert is None or
- (self.latest_manifest_cert.getNotAfter() < nextUpdate and
- self.latest_manifest_cert.getNotAfter() < self.latest_ca_cert.getNotAfter())):
- logger.debug("Generating EE certificate for %s", uri)
- self.generate_manifest_cert()
- logger.debug("Latest CA cert notAfter %s, new %s EE notAfter %s",
- self.latest_ca_cert.getNotAfter(), uri, self.latest_manifest_cert.getNotAfter())
-
- logger.debug("Constructing manifest object list for %s", uri)
- objs = [(self.crl_uri_tail, self.latest_crl)]
- objs.extend((c.uri_tail, c.cert) for c in self.child_certs)
- objs.extend((r.uri_tail, r.roa) for r in self.roas if r.roa is not None)
- objs.extend((g.uri_tail, g.ghostbuster) for g in self.ghostbusters)
- objs.extend((e.uri_tail, e.cert) for e in self.ee_certificates)
-
- logger.debug("Building manifest object %s", uri)
- old_manifest = self.latest_manifest
- self.latest_manifest = rpki.x509.SignedManifest.build(
- serial = ca.next_manifest_number(),
- thisUpdate = now,
- nextUpdate = nextUpdate,
- names_and_objs = objs,
- keypair = self.manifest_private_key_id,
- certs = self.latest_manifest_cert)
-
- logger.debug("Manifest generation took %s", rpki.sundial.now() - now)
-
- self.manifest_published = rpki.sundial.now()
- self.sql_mark_dirty()
- publisher.queue(uri = uri,
- old_obj = old_manifest,
- new_obj = self.latest_manifest,
- repository = parent.repository,
- handler = self.manifest_published_callback)
-
- def manifest_published_callback(self, pdu):
- """
- Check result of manifest publication.
- """
-
- rpki.publication.raise_if_error(pdu)
- self.manifest_published = None
- self.sql_mark_dirty()
-
- def reissue(self, cb, eb):
- """
- Reissue all current certificates issued by this ca_detail.
- """
-
- publisher = publication_queue()
- self.check_failed_publication(publisher)
- for roa in self.roas:
- roa.regenerate(publisher, fast = True)
- for ghostbuster in self.ghostbusters:
- ghostbuster.regenerate(publisher, fast = True)
- for ee_certificate in self.ee_certificates:
- ee_certificate.reissue(publisher, force = True)
- for child_cert in self.child_certs:
- child_cert.reissue(self, publisher, force = True)
- self.gctx.sql.sweep()
- self.generate_manifest_cert()
- self.sql_mark_dirty()
- self.generate_crl(publisher = publisher)
- self.generate_manifest(publisher = publisher)
- self.gctx.sql.sweep()
- publisher.call_pubd(cb, eb)
-
- def check_failed_publication(self, publisher, check_all = True):
- """
- Check for failed publication of objects issued by this ca_detail.
-
- All publishable objects have timestamp fields recording time of
- last attempted publication, and callback methods which clear these
- timestamps once publication has succeeded. Our task here is to
- look for objects issued by this ca_detail which have timestamps
- set (indicating that they have not been published) and for which
- the timestamps are not very recent (for some definition of very
- recent -- intent is to allow a bit of slack in case pubd is just
- being slow). In such cases, we want to retry publication.
-
- As an optimization, we can probably skip checking other products
- if manifest and CRL have been published, thus saving ourselves
- several complex SQL queries. Not sure yet whether this
- optimization is worthwhile.
-
- For the moment we check everything without optimization, because
- it simplifies testing.
-
- For the moment our definition of staleness is hardwired; this
- should become configurable.
- """
-
- logger.debug("Checking for failed publication for %r", self)
-
- stale = rpki.sundial.now() - rpki.sundial.timedelta(seconds = 60)
- repository = self.ca.parent.repository
-
- if self.latest_crl is not None and \
- self.crl_published is not None and \
- self.crl_published < stale:
- logger.debug("Retrying publication for %s", self.crl_uri)
- publisher.queue(uri = self.crl_uri,
- new_obj = self.latest_crl,
- repository = repository,
- handler = self.crl_published_callback)
-
- if self.latest_manifest is not None and \
- self.manifest_published is not None and \
- self.manifest_published < stale:
- logger.debug("Retrying publication for %s", self.manifest_uri)
- publisher.queue(uri = self.manifest_uri,
- new_obj = self.latest_manifest,
- repository = repository,
- handler = self.manifest_published_callback)
-
- if not check_all:
- return
-
- # Might also be able to return here if manifest and CRL are up to
- # date, but let's avoid premature optimization
-
- for child_cert in self.unpublished_child_certs(stale):
- logger.debug("Retrying publication for %s", child_cert)
- publisher.queue(
- uri = child_cert.uri,
- new_obj = child_cert.cert,
- repository = repository,
- handler = child_cert.published_callback)
-
- for roa in self.unpublished_roas(stale):
- logger.debug("Retrying publication for %s", roa)
- publisher.queue(
- uri = roa.uri,
- new_obj = roa.roa,
- repository = repository,
- handler = roa.published_callback)
-
- for ghostbuster in self.unpublished_ghostbusters(stale):
- logger.debug("Retrying publication for %s", ghostbuster)
- publisher.queue(
- uri = ghostbuster.uri,
- new_obj = ghostbuster.ghostbuster,
- repository = repository,
- handler = ghostbuster.published_callback)
-
- for ee_cert in self.unpublished_ee_certificates(stale):
- logger.debug("Retrying publication for %s", ee_cert)
- publisher.queue(
- uri = ee_cert.uri,
- new_obj = ee_cert.cert,
- repository = repository,
- handler = ee_cert.published_callback)
-
-
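check_failed_publication() above treats an object as needing a retry when its publication timestamp is still set (pubd never confirmed it) and older than a short slack window. A standalone sketch of that staleness test, reusing the hardwired 60-second slack, with stdlib datetimes standing in for rpki.sundial:

from datetime import datetime, timedelta

def needs_retry(published, slack_seconds = 60, now = None):
    if published is None:                   # publication already confirmed
        return False
    now = now or datetime.utcnow()
    return published < now - timedelta(seconds = slack_seconds)

if __name__ == "__main__":
    print(needs_retry(None))                                        # False
    print(needs_retry(datetime.utcnow()))                           # False, too recent
    print(needs_retry(datetime.utcnow() - timedelta(minutes = 5)))  # True
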
-class child_cert_obj(rpki.sql.sql_persistent):
- """
- Certificate that has been issued to a child.
- """
-
- sql_template = rpki.sql.template(
- "child_cert",
- "child_cert_id",
- ("cert", rpki.x509.X509),
- "child_id",
- "ca_detail_id",
- "ski",
- ("published", rpki.sundial.datetime))
-
- def __repr__(self):
- args = [self]
- try:
- args.append(self.uri)
- except: # pylint: disable=W0702
- pass
- return rpki.log.log_repr(*args)
-
- def __init__(self, gctx = None, child_id = None, ca_detail_id = None, cert = None):
- """
- Initialize a child_cert_obj.
- """
-
- rpki.sql.sql_persistent.__init__(self)
- self.gctx = gctx
- self.child_id = child_id
- self.ca_detail_id = ca_detail_id
- self.cert = cert
- self.published = None
- if child_id or ca_detail_id or cert:
- self.sql_mark_dirty()
-
- @property
- @rpki.sql.cache_reference
- def child(self):
- """
- Fetch child object to which this child_cert object links.
- """
-
- return rpki.left_right.child_elt.sql_fetch(self.gctx, self.child_id)
-
- @property
- @rpki.sql.cache_reference
- def ca_detail(self):
- """
- Fetch ca_detail object to which this child_cert object links.
- """
-
- return ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id)
-
- @ca_detail.deleter
- def ca_detail(self):
- try:
- del self._ca_detail
- except AttributeError:
- pass
-
- @property
- def uri_tail(self):
- """
- Return the tail (filename) portion of the URI for this child_cert.
- """
-
- return self.cert.gSKI() + ".cer"
-
- @property
- def uri(self):
- """
- Return the publication URI for this child_cert.
- """
-
- return self.ca_detail.ca.sia_uri + self.uri_tail
-
- def revoke(self, publisher, generate_crl_and_manifest = True):
- """
- Revoke a child cert.
- """
-
- ca_detail = self.ca_detail
- ca = ca_detail.ca
- logger.debug("Revoking %r %r", self, self.uri)
- revoked_cert_obj.revoke(cert = self.cert, ca_detail = ca_detail)
- publisher.queue(
- uri = self.uri,
- old_obj = self.cert,
- repository = ca.parent.repository)
- self.gctx.sql.sweep()
- self.sql_delete()
- if generate_crl_and_manifest:
- ca_detail.generate_crl(publisher = publisher)
- ca_detail.generate_manifest(publisher = publisher)
-
- def reissue(self, ca_detail, publisher, resources = None, sia = None, force = False):
- """
- Reissue an existing child cert, reusing the public key. If the
- child cert we would generate is identical to the one we already
- have, we just return the one we already have. If we have to
- revoke the old child cert when generating the new one, we have to
- generate a new child_cert_obj, so calling code that needs the
- updated child_cert_obj must use the return value from this method.
- """
-
- ca = ca_detail.ca
- child = self.child
-
- old_resources = self.cert.get_3779resources()
- old_sia = self.cert.get_SIA()
- old_aia = self.cert.get_AIA()[0]
- old_ca_detail = self.ca_detail
-
- needed = False
-
- if resources is None:
- resources = old_resources
-
- if sia is None:
- sia = old_sia
-
- assert resources.valid_until is not None and old_resources.valid_until is not None
-
- if resources.asn != old_resources.asn or resources.v4 != old_resources.v4 or resources.v6 != old_resources.v6:
- logger.debug("Resources changed for %r: old %s new %s", self, old_resources, resources)
- needed = True
-
- if resources.valid_until != old_resources.valid_until:
- logger.debug("Validity changed for %r: old %s new %s",
- self, old_resources.valid_until, resources.valid_until)
- needed = True
-
- if sia != old_sia:
- logger.debug("SIA changed for %r: old %r new %r", self, old_sia, sia)
- needed = True
-
- if ca_detail != old_ca_detail:
- logger.debug("Issuer changed for %r: old %r new %r", self, old_ca_detail, ca_detail)
- needed = True
-
- if ca_detail.ca_cert_uri != old_aia:
- logger.debug("AIA changed for %r: old %r new %r", self, old_aia, ca_detail.ca_cert_uri)
- needed = True
-
- must_revoke = old_resources.oversized(resources) or old_resources.valid_until > resources.valid_until
- if must_revoke:
- logger.debug("Must revoke any existing cert(s) for %r", self)
- needed = True
-
- if not needed and force:
- logger.debug("No change needed for %r, forcing reissuance anyway", self)
- needed = True
-
- if not needed:
- logger.debug("No change to %r", self)
- return self
-
- if must_revoke:
- for x in child.fetch_child_certs(ca_detail = ca_detail, ski = self.ski):
- logger.debug("Revoking child_cert %r", x)
- x.revoke(publisher = publisher)
- ca_detail.generate_crl(publisher = publisher)
- ca_detail.generate_manifest(publisher = publisher)
-
- child_cert = ca_detail.issue(
- ca = ca,
- child = child,
- subject_key = self.cert.getPublicKey(),
- sia = sia,
- resources = resources,
- child_cert = None if must_revoke else self,
- publisher = publisher)
-
- logger.debug("New child_cert %r uri %s", child_cert, child_cert.uri)
-
- return child_cert
-
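reissue() above boils down to two flags: "needed" (something relevant changed, or force was given) and "must_revoke" (the new cert would drop resources or shorten validity, so existing certs for this key must be revoked first). A simplified sketch of that decision, using plain sets and dict fields in place of resource_bag and the real cert attributes; the "new is not a superset of old" test is only an approximation of old_resources.oversized():

def reissue_decision(old, new, issuer_changed = False, force = False):
    must_revoke = (not new["resources"] >= old["resources"] or
                   new["valid_until"] < old["valid_until"])
    needed = (must_revoke or issuer_changed or force or
              new["resources"] != old["resources"] or
              new["valid_until"] != old["valid_until"] or
              new["sia"] != old["sia"])
    return needed, must_revoke

if __name__ == "__main__":
    old = dict(resources = set([1, 2, 3]), valid_until = 2026, sia = "rsync://a/")
    new = dict(resources = set([1, 2]),    valid_until = 2026, sia = "rsync://a/")
    print(reissue_decision(old, new))       # (True, True): resources shrank
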
- @classmethod
- def fetch(cls, gctx = None, child = None, ca_detail = None, ski = None, unique = False, unpublished = None):
- """
- Fetch all child_cert objects matching a particular set of
- parameters. This is a wrapper to consolidate various queries that
- would otherwise be inline SQL WHERE expressions. In most cases
- code calls this indirectly, through methods in other classes.
- """
-
- args = []
- where = []
-
- if child:
- where.append("child_id = %s")
- args.append(child.child_id)
-
- if ca_detail:
- where.append("ca_detail_id = %s")
- args.append(ca_detail.ca_detail_id)
-
- if ski:
- where.append("ski = %s")
- args.append(ski)
-
- if unpublished is not None:
- where.append("published IS NOT NULL AND published < %s")
- args.append(unpublished)
-
- where = " AND ".join(where)
-
- gctx = gctx or (child and child.gctx) or (ca_detail and ca_detail.gctx) or None
-
- if unique:
- return cls.sql_fetch_where1(gctx, where, args)
- else:
- return cls.sql_fetch_where(gctx, where, args)
-
- def published_callback(self, pdu):
- """
- Publication callback: check result and mark published.
- """
-
- rpki.publication.raise_if_error(pdu)
- self.published = None
- self.sql_mark_dirty()
-
-class revoked_cert_obj(rpki.sql.sql_persistent):
- """
- Tombstone for a revoked certificate.
- """
-
- sql_template = rpki.sql.template(
- "revoked_cert",
- "revoked_cert_id",
- "serial",
- "ca_detail_id",
- ("revoked", rpki.sundial.datetime),
- ("expires", rpki.sundial.datetime))
-
- def __repr__(self):
- return rpki.log.log_repr(self, repr(self.ca_detail), self.serial, self.revoked)
-
- def __init__(self, gctx = None, serial = None, revoked = None, expires = None, ca_detail_id = None):
- """
- Initialize a revoked_cert_obj.
- """
-
- rpki.sql.sql_persistent.__init__(self)
- self.gctx = gctx
- self.serial = serial
- self.revoked = revoked
- self.expires = expires
- self.ca_detail_id = ca_detail_id
- if serial or revoked or expires or ca_detail_id:
- self.sql_mark_dirty()
-
- @property
- @rpki.sql.cache_reference
- def ca_detail(self):
- """
- Fetch ca_detail object to which this revoked_cert_obj links.
- """
-
- return ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id)
-
- @classmethod
- def revoke(cls, cert, ca_detail):
- """
- Revoke a certificate.
- """
-
- return cls(
- serial = cert.getSerial(),
- expires = cert.getNotAfter(),
- revoked = rpki.sundial.now(),
- gctx = ca_detail.gctx,
- ca_detail_id = ca_detail.ca_detail_id)
-
-class roa_obj(rpki.sql.sql_persistent):
- """
- Route Origin Authorization.
- """
-
- sql_template = rpki.sql.template(
- "roa",
- "roa_id",
- "ca_detail_id",
- "self_id",
- "asn",
- ("roa", rpki.x509.ROA),
- ("cert", rpki.x509.X509),
- ("published", rpki.sundial.datetime))
-
- ca_detail_id = None
- cert = None
- roa = None
- published = None
-
- @property
- @rpki.sql.cache_reference
- def self(self):
- """
- Fetch self object to which this roa_obj links.
- """
- return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id)
-
- @property
- @rpki.sql.cache_reference
- def ca_detail(self):
- """
- Fetch ca_detail object to which this roa_obj links.
- """
-
- return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id)
-
- @ca_detail.deleter
- def ca_detail(self):
- try:
- del self._ca_detail
- except AttributeError:
- pass
-
- def sql_fetch_hook(self):
- """
- Extra SQL fetch actions for roa_obj -- handle prefix lists.
- """
-
- for version, datatype, attribute in ((4, rpki.resource_set.roa_prefix_set_ipv4, "ipv4"),
- (6, rpki.resource_set.roa_prefix_set_ipv6, "ipv6")):
- setattr(self, attribute, datatype.from_sql(
- self.gctx.sql,
- """
- SELECT prefix, prefixlen, max_prefixlen FROM roa_prefix
- WHERE roa_id = %s AND version = %s
- """,
- (self.roa_id, version)))
-
- def sql_insert_hook(self):
- """
- Extra SQL insert actions for roa_obj -- handle prefix lists.
- """
-
- for version, prefix_set in ((4, self.ipv4), (6, self.ipv6)):
- if prefix_set:
- self.gctx.sql.executemany(
- """
- INSERT roa_prefix (roa_id, prefix, prefixlen, max_prefixlen, version)
- VALUES (%s, %s, %s, %s, %s)
- """,
- ((self.roa_id, x.prefix, x.prefixlen, x.max_prefixlen, version)
- for x in prefix_set))
-
- def sql_delete_hook(self):
- """
- Extra SQL delete actions for roa_obj -- handle prefix lists.
- """
-
- self.gctx.sql.execute("DELETE FROM roa_prefix WHERE roa_id = %s", (self.roa_id,))
-
- def __repr__(self):
- args = [self, self.asn, self.ipv4, self.ipv6]
- try:
- args.append(self.uri)
- except: # pylint: disable=W0702
- pass
- return rpki.log.log_repr(*args)
-
- def __init__(self, gctx = None, self_id = None, asn = None, ipv4 = None, ipv6 = None):
- rpki.sql.sql_persistent.__init__(self)
- self.gctx = gctx
- self.self_id = self_id
- self.asn = asn
- self.ipv4 = ipv4
- self.ipv6 = ipv6
-
- # Defer marking new ROA as dirty until .generate() has a chance to
- # finish setup, otherwise we get SQL consistency errors.
- #
- #if self_id or asn or ipv4 or ipv6: self.sql_mark_dirty()
-
- def update(self, publisher, fast = False):
- """
-    Bring this roa_obj's ROA up to date if necessary.
- """
-
- v4 = self.ipv4.to_resource_set() if self.ipv4 is not None else rpki.resource_set.resource_set_ipv4()
- v6 = self.ipv6.to_resource_set() if self.ipv6 is not None else rpki.resource_set.resource_set_ipv6()
-
- if self.roa is None:
- logger.debug("%r doesn't exist, generating", self)
- return self.generate(publisher = publisher, fast = fast)
-
- ca_detail = self.ca_detail
-
- if ca_detail is None:
- logger.debug("%r has no associated ca_detail, generating", self)
- return self.generate(publisher = publisher, fast = fast)
-
- if ca_detail.state != "active":
- logger.debug("ca_detail associated with %r not active (state %s), regenerating", self, ca_detail.state)
- return self.regenerate(publisher = publisher, fast = fast)
-
- now = rpki.sundial.now()
- regen_time = self.cert.getNotAfter() - rpki.sundial.timedelta(seconds = self.self.regen_margin)
-
- if now > regen_time and self.cert.getNotAfter() < ca_detail.latest_ca_cert.getNotAfter():
- logger.debug("%r past threshold %s, regenerating", self, regen_time)
- return self.regenerate(publisher = publisher, fast = fast)
-
- if now > regen_time:
- logger.warning("%r is past threshold %s but so is issuer %r, can't regenerate", self, regen_time, ca_detail)
-
- ca_resources = ca_detail.latest_ca_cert.get_3779resources()
- ee_resources = self.cert.get_3779resources()
-
- if ee_resources.oversized(ca_resources):
- logger.debug("%r oversized with respect to CA, regenerating", self)
- return self.regenerate(publisher = publisher, fast = fast)
-
- if ee_resources.v4 != v4 or ee_resources.v6 != v6:
- logger.debug("%r resources do not match EE, regenerating", self)
- return self.regenerate(publisher = publisher, fast = fast)
-
- if self.cert.get_AIA()[0] != ca_detail.ca_cert_uri:
- logger.debug("%r AIA changed, regenerating", self)
- return self.regenerate(publisher = publisher, fast = fast)
-
- def generate(self, publisher, fast = False):
- """
- Generate a ROA.
-
- At present we have no way of performing a direct lookup from a
- desired set of resources to a covering certificate, so we have to
- search. This could be quite slow if we have a lot of active
- ca_detail objects. Punt on the issue for now, revisit if
- profiling shows this as a hotspot.
-
- Once we have the right covering certificate, we generate the ROA
- payload, generate a new EE certificate, use the EE certificate to
- sign the ROA payload, publish the result, then throw away the
- private key for the EE cert, all per the ROA specification. This
- implies that generating a lot of ROAs will tend to thrash
- /dev/random, but there is not much we can do about that.
-
- If fast is set, we leave generating the new manifest for our
- caller to handle, presumably at the end of a bulk operation.
- """
-
- if self.ipv4 is None and self.ipv6 is None:
- raise rpki.exceptions.EmptyROAPrefixList
-
- # Ugly and expensive search for covering ca_detail, there has to
- # be a better way, but it would require the ability to test for
- # resource subsets in SQL.
-
- v4 = self.ipv4.to_resource_set() if self.ipv4 is not None else rpki.resource_set.resource_set_ipv4()
- v6 = self.ipv6.to_resource_set() if self.ipv6 is not None else rpki.resource_set.resource_set_ipv6()
-
- ca_detail = self.ca_detail
- if ca_detail is None or ca_detail.state != "active" or ca_detail.has_expired():
- logger.debug("Searching for new ca_detail for ROA %r", self)
- ca_detail = None
- for parent in self.self.parents:
- for ca in parent.cas:
- ca_detail = ca.active_ca_detail
- assert ca_detail is None or ca_detail.state == "active"
- if ca_detail is not None and not ca_detail.has_expired():
- resources = ca_detail.latest_ca_cert.get_3779resources()
- if v4.issubset(resources.v4) and v6.issubset(resources.v6):
- break
- ca_detail = None
- if ca_detail is not None:
- break
- else:
- logger.debug("Keeping old ca_detail for ROA %r", self)
-
- if ca_detail is None:
- raise rpki.exceptions.NoCoveringCertForROA("Could not find a certificate covering %r" % self)
-
- logger.debug("Using new ca_detail %r for ROA %r, ca_detail_state %s",
- ca_detail, self, ca_detail.state)
-
- ca = ca_detail.ca
- resources = rpki.resource_set.resource_bag(v4 = v4, v6 = v6)
- keypair = rpki.x509.RSA.generate()
-
- del self.ca_detail
- self.ca_detail_id = ca_detail.ca_detail_id
- self.cert = ca_detail.issue_ee(
- ca = ca,
- resources = resources,
- subject_key = keypair.get_public(),
- sia = (None, None, self.uri_from_key(keypair), ca.parent.repository.rrdp_notification_uri))
- self.roa = rpki.x509.ROA.build(self.asn, self.ipv4, self.ipv6, keypair, (self.cert,))
- self.published = rpki.sundial.now()
- self.sql_store()
-
- logger.debug("Generating %r URI %s", self, self.uri)
- publisher.queue(
- uri = self.uri,
- new_obj = self.roa,
- repository = ca.parent.repository,
- handler = self.published_callback)
- if not fast:
- ca_detail.generate_manifest(publisher = publisher)
-
-
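The covering-certificate search that generate() performs is, condensed, a linear subset test over every active ca_detail reachable from this ROA's self object (find_covering_ca_detail is an illustrative helper, not a function in this module):

    def find_covering_ca_detail(self_obj, v4, v6):
        # self_obj is the self_elt owning the ROA; v4/v6 are the
        # rpki.resource_set prefix sets requested for the ROA.
        for parent in self_obj.parents:
            for ca in parent.cas:
                ca_detail = ca.active_ca_detail
                if ca_detail is None or ca_detail.has_expired():
                    continue
                resources = ca_detail.latest_ca_cert.get_3779resources()
                if v4.issubset(resources.v4) and v6.issubset(resources.v6):
                    return ca_detail      # first active ca_detail covering both sets
        return None                       # caller raises NoCoveringCertForROA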
- def published_callback(self, pdu):
- """
- Check publication result.
- """
-
- rpki.publication.raise_if_error(pdu)
- self.published = None
- self.sql_mark_dirty()
-
- def revoke(self, publisher, regenerate = False, allow_failure = False, fast = False):
- """
- Withdraw ROA associated with this roa_obj.
-
- In order to preserve make-before-break properties without
- duplicating code, this method also handles generating a
- replacement ROA when requested.
-
- If allow_failure is set, failing to withdraw the ROA will not be
- considered an error.
-
- If fast is set, SQL actions will be deferred, on the assumption
- that our caller will handle regenerating CRL and manifest and
- flushing the SQL cache.
- """
-
- ca_detail = self.ca_detail
- cert = self.cert
- roa = self.roa
- uri = self.uri
-
- logger.debug("%s %r, ca_detail %r state is %s",
- "Regenerating" if regenerate else "Not regenerating",
- self, ca_detail, ca_detail.state)
-
- if regenerate:
- self.generate(publisher = publisher, fast = fast)
-
- logger.debug("Withdrawing %r %s and revoking its EE cert", self, uri)
- rpki.rpkid.revoked_cert_obj.revoke(cert = cert, ca_detail = ca_detail)
- publisher.queue(uri = uri,
- old_obj = roa,
- repository = ca_detail.ca.parent.repository,
- handler = False if allow_failure else None)
-
- if not regenerate:
- self.sql_mark_deleted()
-
- if not fast:
- ca_detail.generate_crl(publisher = publisher)
- ca_detail.generate_manifest(publisher = publisher)
- self.gctx.sql.sweep()
-
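The ordering inside revoke() is what preserves make-before-break: when a replacement is wanted, the new ROA is generated and queued before the old EE certificate is revoked and the old object withdrawn. A sketch of that path, using only the calls visible above (replace_roa is an illustrative helper):

    def replace_roa(roa, publisher):
        # Capture the outgoing state before generate() rebinds roa.cert / roa.roa.
        old_ca_detail, old_cert, old_roa, old_uri = roa.ca_detail, roa.cert, roa.roa, roa.uri
        # Make: issue, sign and queue the replacement first.
        roa.generate(publisher = publisher, fast = True)
        # Break: revoke the old EE certificate and withdraw the old ROA object.
        rpki.rpkid.revoked_cert_obj.revoke(cert = old_cert, ca_detail = old_ca_detail)
        publisher.queue(uri = old_uri, old_obj = old_roa,
                        repository = old_ca_detail.ca.parent.repository)
        # Fresh CRL and manifest so the revocation becomes visible.
        old_ca_detail.generate_crl(publisher = publisher)
        old_ca_detail.generate_manifest(publisher = publisher)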
- def regenerate(self, publisher, fast = False):
- """
- Reissue ROA associated with this roa_obj.
- """
-
- if self.ca_detail is None:
- self.generate(publisher = publisher, fast = fast)
- else:
- self.revoke(publisher = publisher, regenerate = True, fast = fast)
-
- def uri_from_key(self, key):
- """
- Return publication URI for a public key.
- """
-
- return self.ca_detail.ca.sia_uri + key.gSKI() + ".roa"
-
- @property
- def uri(self):
- """
- Return the publication URI for this roa_obj's ROA.
- """
-
- return self.ca_detail.ca.sia_uri + self.uri_tail
-
- @property
- def uri_tail(self):
- """
- Return the tail (filename portion) of the publication URI for this
- roa_obj's ROA.
- """
-
- return self.cert.gSKI() + ".roa"
-
-
-class ghostbuster_obj(rpki.sql.sql_persistent):
- """
- Ghostbusters record.
- """
-
- sql_template = rpki.sql.template(
- "ghostbuster",
- "ghostbuster_id",
- "ca_detail_id",
- "self_id",
- "vcard",
- ("ghostbuster", rpki.x509.Ghostbuster),
- ("cert", rpki.x509.X509),
- ("published", rpki.sundial.datetime))
-
- ca_detail_id = None
- cert = None
- ghostbuster = None
- published = None
- vcard = None
-
- def __repr__(self):
- args = [self]
- try:
- args.extend(self.vcard.splitlines()[2:-1])
- except: # pylint: disable=W0702
- pass
- try:
- args.append(self.uri)
- except: # pylint: disable=W0702
- pass
- return rpki.log.log_repr(*args)
-
- @property
- @rpki.sql.cache_reference
- def self(self):
- """
- Fetch self object to which this ghostbuster_obj links.
- """
-
- return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id)
-
- @property
- @rpki.sql.cache_reference
- def ca_detail(self):
- """
- Fetch ca_detail object to which this ghostbuster_obj links.
- """
-
- return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id)
-
- def __init__(self, gctx = None, self_id = None, ca_detail_id = None, vcard = None):
- rpki.sql.sql_persistent.__init__(self)
- self.gctx = gctx
- self.self_id = self_id
- self.ca_detail_id = ca_detail_id
- self.vcard = vcard
-
- # Defer marking new ghostbuster as dirty until .generate() has a chance to
- # finish setup, otherwise we get SQL consistency errors.
-
- def update(self, publisher, fast = False):
- """
-    Bring this ghostbuster_obj up to date if necessary.
- """
-
- if self.ghostbuster is None:
- logger.debug("Ghostbuster record doesn't exist, generating")
- return self.generate(publisher = publisher, fast = fast)
-
- now = rpki.sundial.now()
- regen_time = self.cert.getNotAfter() - rpki.sundial.timedelta(seconds = self.self.regen_margin)
-
- if now > regen_time and self.cert.getNotAfter() < self.ca_detail.latest_ca_cert.getNotAfter():
- logger.debug("%r past threshold %s, regenerating", self, regen_time)
- return self.regenerate(publisher = publisher, fast = fast)
-
- if now > regen_time:
- logger.warning("%r is past threshold %s but so is issuer %r, can't regenerate", self, regen_time, self.ca_detail)
-
- if self.cert.get_AIA()[0] != self.ca_detail.ca_cert_uri:
- logger.debug("%r AIA changed, regenerating", self)
- return self.regenerate(publisher = publisher, fast = fast)
-
- def generate(self, publisher, fast = False):
- """
-    Generate a Ghostbuster record.
-
- Once we have the right covering certificate, we generate the
- ghostbuster payload, generate a new EE certificate, use the EE
- certificate to sign the ghostbuster payload, publish the result,
- then throw away the private key for the EE cert. This is modeled
- after the way we handle ROAs.
-
- If fast is set, we leave generating the new manifest for our
- caller to handle, presumably at the end of a bulk operation.
- """
-
- ca_detail = self.ca_detail
- ca = ca_detail.ca
-
- resources = rpki.resource_set.resource_bag.from_inheritance()
- keypair = rpki.x509.RSA.generate()
-
- self.cert = ca_detail.issue_ee(
- ca = ca,
- resources = resources,
- subject_key = keypair.get_public(),
- sia = (None, None, self.uri_from_key(keypair), ca.parent.repository.rrdp_notification_uri))
- self.ghostbuster = rpki.x509.Ghostbuster.build(self.vcard, keypair, (self.cert,))
- self.published = rpki.sundial.now()
- self.sql_store()
-
- logger.debug("Generating Ghostbuster record %r", self.uri)
- publisher.queue(
- uri = self.uri,
- new_obj = self.ghostbuster,
- repository = ca.parent.repository,
- handler = self.published_callback)
- if not fast:
- ca_detail.generate_manifest(publisher = publisher)
-
- def published_callback(self, pdu):
- """
- Check publication result.
- """
-
- rpki.publication.raise_if_error(pdu)
- self.published = None
- self.sql_mark_dirty()
-
- def revoke(self, publisher, regenerate = False, allow_failure = False, fast = False):
- """
- Withdraw Ghostbuster associated with this ghostbuster_obj.
-
- In order to preserve make-before-break properties without
- duplicating code, this method also handles generating a
- replacement ghostbuster when requested.
-
- If allow_failure is set, failing to withdraw the ghostbuster will not be
- considered an error.
-
- If fast is set, SQL actions will be deferred, on the assumption
- that our caller will handle regenerating CRL and manifest and
- flushing the SQL cache.
- """
-
- ca_detail = self.ca_detail
- cert = self.cert
- ghostbuster = self.ghostbuster
- uri = self.uri
-
- logger.debug("%s %r, ca_detail %r state is %s",
- "Regenerating" if regenerate else "Not regenerating",
- self, ca_detail, ca_detail.state)
-
- if regenerate:
- self.generate(publisher = publisher, fast = fast)
-
- logger.debug("Withdrawing %r %s and revoking its EE cert", self, uri)
- rpki.rpkid.revoked_cert_obj.revoke(cert = cert, ca_detail = ca_detail)
- publisher.queue(uri = uri,
- old_obj = ghostbuster,
- repository = ca_detail.ca.parent.repository,
- handler = False if allow_failure else None)
-
- if not regenerate:
- self.sql_mark_deleted()
-
- if not fast:
- ca_detail.generate_crl(publisher = publisher)
- ca_detail.generate_manifest(publisher = publisher)
- self.gctx.sql.sweep()
-
- def regenerate(self, publisher, fast = False):
- """
- Reissue Ghostbuster associated with this ghostbuster_obj.
- """
-
- if self.ghostbuster is None:
- self.generate(publisher = publisher, fast = fast)
- else:
- self.revoke(publisher = publisher, regenerate = True, fast = fast)
-
- def uri_from_key(self, key):
- """
- Return publication URI for a public key.
- """
-
- return self.ca_detail.ca.sia_uri + key.gSKI() + ".gbr"
-
- @property
- def uri(self):
- """
- Return the publication URI for this ghostbuster_obj's ghostbuster.
- """
-
- return self.ca_detail.ca.sia_uri + self.uri_tail
-
- @property
- def uri_tail(self):
- """
- Return the tail (filename portion) of the publication URI for this
- ghostbuster_obj's ghostbuster.
- """
-
- return self.cert.gSKI() + ".gbr"
-
-
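Both classes above derive their publication URIs the same way: the issuing CA's SIA directory, the EE certificate's g(SKI), and a type-specific suffix; the EE certificate class that follows does likewise with ".cer". For illustration, with a hypothetical repository directory and g(SKI):

    sia_uri = "rsync://repo.example.net/rpki/Alice/"   # ca.sia_uri (hypothetical)
    gski    = "vU3GHDk1rDkHhhV6S-tS0-shOEQ"            # cert.gSKI() (example value)

    roa_uri = sia_uri + gski + ".roa"   # roa_obj.uri
    gbr_uri = sia_uri + gski + ".gbr"   # ghostbuster_obj.uri
    cer_uri = sia_uri + gski + ".cer"   # ee_cert_obj.uri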
-class ee_cert_obj(rpki.sql.sql_persistent):
- """
- EE certificate (router certificate or generic).
- """
-
- sql_template = rpki.sql.template(
- "ee_cert",
- "ee_cert_id",
- "self_id",
- "ca_detail_id",
- "ski",
- ("cert", rpki.x509.X509),
- ("published", rpki.sundial.datetime))
-
- def __repr__(self):
- return rpki.log.log_repr(self, self.cert.getSubject(), self.uri)
-
- def __init__(self, gctx = None, self_id = None, ca_detail_id = None, cert = None):
- rpki.sql.sql_persistent.__init__(self)
- self.gctx = gctx
- self.self_id = self_id
- self.ca_detail_id = ca_detail_id
- self.cert = cert
- self.ski = None if cert is None else cert.get_SKI()
- self.published = None
- if self_id or ca_detail_id or cert:
- self.sql_mark_dirty()
-
- @property
- @rpki.sql.cache_reference
- def self(self):
- """
- Fetch self object to which this ee_cert_obj links.
- """
-
- return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id)
-
- @property
- @rpki.sql.cache_reference
- def ca_detail(self):
- """
- Fetch ca_detail object to which this ee_cert_obj links.
- """
-
- return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id)
-
- @ca_detail.deleter
- def ca_detail(self):
- try:
- del self._ca_detail
- except AttributeError:
- pass
-
- @property
- def gski(self):
- """
- Calculate g(SKI), for ease of comparison with XML.
-
- Although, really, one has to ask why we don't just store g(SKI)
- in rpkid.sql instead of ski....
- """
-
- return base64.urlsafe_b64encode(self.ski).rstrip("=")
-
- @gski.setter
- def gski(self, val):
- self.ski = base64.urlsafe_b64decode(val + ("=" * ((4 - len(val)) % 4)))
-
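The gski property and its setter are plain URL-safe Base64 with the trailing "=" padding stripped on encode and restored on decode. A self-contained round trip with an illustrative 20-byte SKI:

    import base64

    ski = "\x01\x23\x45\x67\x89\xab\xcd\xef" * 2 + "\xfe\xdc\xba\x98"   # 20-byte SKI (example)

    gski = base64.urlsafe_b64encode(ski).rstrip("=")   # 28 chars, minus one "=" of padding
    assert len(gski) == 27

    restored = base64.urlsafe_b64decode(gski + "=" * ((4 - len(gski)) % 4))
    assert restored == ski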
- @property
- def uri(self):
- """
- Return the publication URI for this ee_cert_obj.
- """
-
- return self.ca_detail.ca.sia_uri + self.uri_tail
-
- @property
- def uri_tail(self):
- """
- Return the tail (filename portion) of the publication URI for this
- ee_cert_obj.
- """
-
- return self.cert.gSKI() + ".cer"
-
- @classmethod
- def create(cls, ca_detail, subject_name, subject_key, resources, publisher, eku = None):
- """
- Generate a new certificate and stuff it in a new ee_cert_obj.
- """
-
- cn, sn = subject_name.extract_cn_and_sn()
- ca = ca_detail.ca
-
- sia = (None, None, ca_detail.ca.sia_uri + subject_key.gSKI() + ".cer", ca.parent.repository.rrdp_notification_uri)
-
- cert = ca_detail.issue_ee(
- ca = ca,
- subject_key = subject_key,
- sia = sia,
- resources = resources,
- notAfter = resources.valid_until,
- cn = cn,
- sn = sn,
- eku = eku)
-
- self = cls(
- gctx = ca_detail.gctx,
- self_id = ca.parent.self.self_id,
- ca_detail_id = ca_detail.ca_detail_id,
- cert = cert)
-
- publisher.queue(
- uri = self.uri,
- new_obj = self.cert,
- repository = ca.parent.repository,
- handler = self.published_callback)
-
- self.sql_store()
-
- ca_detail.generate_manifest(publisher = publisher)
-
- logger.debug("New ee_cert %r", self)
-
- return self
-
- def revoke(self, publisher, generate_crl_and_manifest = True):
- """
- Revoke and withdraw an EE certificate.
- """
-
- ca_detail = self.ca_detail
- ca = ca_detail.ca
- logger.debug("Revoking %r %r", self, self.uri)
- revoked_cert_obj.revoke(cert = self.cert, ca_detail = ca_detail)
- publisher.queue(uri = self.uri,
- old_obj = self.cert,
- repository = ca.parent.repository)
- self.gctx.sql.sweep()
- self.sql_delete()
- if generate_crl_and_manifest:
- ca_detail.generate_crl(publisher = publisher)
- ca_detail.generate_manifest(publisher = publisher)
-
- def reissue(self, publisher, ca_detail = None, resources = None, force = False):
- """
- Reissue an existing EE cert, reusing the public key. If the EE
- cert we would generate is identical to the one we already have, we
- just return; if we need to reissue, we reuse this ee_cert_obj and
- just update its contents, as the publication URI will not have
- changed.
- """
-
- needed = False
-
- old_cert = self.cert
-
- old_ca_detail = self.ca_detail
- if ca_detail is None:
- ca_detail = old_ca_detail
-
- assert ca_detail.ca is old_ca_detail.ca
-
- old_resources = old_cert.get_3779resources()
- if resources is None:
- resources = old_resources
-
- assert resources.valid_until is not None and old_resources.valid_until is not None
-
- assert ca_detail.covers(resources)
-
- if ca_detail != self.ca_detail:
- logger.debug("ca_detail changed for %r: old %r new %r",
- self, self.ca_detail, ca_detail)
- needed = True
-
- if ca_detail.ca_cert_uri != old_cert.get_AIA()[0]:
- logger.debug("AIA changed for %r: old %s new %s",
- self, old_cert.get_AIA()[0], ca_detail.ca_cert_uri)
- needed = True
-
- if resources.valid_until != old_resources.valid_until:
- logger.debug("Validity changed for %r: old %s new %s",
- self, old_resources.valid_until, resources.valid_until)
- needed = True
-
- if resources.asn != old_resources.asn or resources.v4 != old_resources.v4 or resources.v6 != old_resources.v6:
- logger.debug("Resources changed for %r: old %s new %s",
- self, old_resources, resources)
- needed = True
-
- must_revoke = (old_resources.oversized(resources) or
- old_resources.valid_until > resources.valid_until)
- if must_revoke:
- logger.debug("Must revoke existing cert(s) for %r", self)
- needed = True
-
- if not needed and force:
- logger.debug("No change needed for %r, forcing reissuance anyway", self)
- needed = True
-
- if not needed:
- logger.debug("No change to %r", self)
- return
-
- cn, sn = self.cert.getSubject().extract_cn_and_sn()
-
- self.cert = ca_detail.issue_ee(
- ca = ca_detail.ca,
- subject_key = self.cert.getPublicKey(),
- eku = self.cert.get_EKU(),
- sia = (None, None, self.uri, ca_detail.ca.parent.repository.rrdp_notification_uri),
- resources = resources,
- notAfter = resources.valid_until,
- cn = cn,
- sn = sn)
-
- self.sql_mark_dirty()
-
- publisher.queue(
- uri = self.uri,
- old_obj = old_cert,
- new_obj = self.cert,
- repository = ca_detail.ca.parent.repository,
- handler = self.published_callback)
-
- if must_revoke:
- revoked_cert_obj.revoke(cert = old_cert.cert, ca_detail = old_ca_detail)
-
- self.gctx.sql.sweep()
-
- if must_revoke:
- ca_detail.generate_crl(publisher = publisher)
- self.gctx.sql.sweep()
-
- ca_detail.generate_manifest(publisher = publisher)
-
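A brief worked example of the revocation rule used in reissue() above (values are illustrative; new_resources is the method's resources argument):

    # Old certificate: 10.0.0.0/8,  valid until 2016-07-01.
    # New IRDB grant:  10.0.0.0/16, valid until 2016-06-01.
    #
    # old_resources.oversized(new_resources) is True: the old certificate asserts
    # 10.0.0.0/8, which the narrower grant no longer covers.  The old valid_until
    # also postdates the new one.  Either condition alone forces revoke-and-reissue,
    # since a certificate cannot be silently narrowed in place.
    must_revoke = (old_resources.oversized(new_resources) or
                   old_resources.valid_until > new_resources.valid_until)   # -> True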
- def published_callback(self, pdu):
- """
- Publication callback: check result and mark published.
- """
-
- rpki.publication.raise_if_error(pdu)
- self.published = None
- self.sql_mark_dirty()
-
class publication_queue(object):
"""
@@ -2770,7 +602,8 @@ class publication_queue(object):
replace = True
- def __init__(self):
+ def __init__(self, rpkid):
+ self.rpkid = rpkid
self.clear()
def clear(self):
@@ -2831,7 +664,7 @@ class publication_queue(object):
def call_pubd(self, cb, eb):
def loop(iterator, rid):
logger.debug("Calling pubd[%r]", self.repositories[rid])
- self.repositories[rid].call_pubd(iterator, eb, self.msgs[rid], self.handlers)
+ self.repositories[rid].call_pubd(self.rpkid, iterator, eb, self.msgs[rid], self.handlers)
def done():
self.clear()
cb()
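With the rpkid handle now passed in explicitly, the calling pattern for publication_queue (used throughout the task code in the next file) is roughly as follows; roa, ca, done and publication_failed stand in for whatever the caller has at hand:

    publisher = rpki.rpkid.publication_queue(rpkid)

    # Queue any number of publish / withdraw operations; each is grouped under
    # the repository that serves its URI.
    publisher.queue(uri = roa.uri, new_obj = roa.roa,
                    repository = ca.parent.repository,
                    handler = roa.published_callback)

    # One pubd round trip per repository, then cb() or eb(e) fires.
    publisher.call_pubd(cb = done, eb = publication_failed)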
diff --git a/rpki/rpkid_tasks.py b/rpki/rpkid_tasks.py
index c44b2220..f6afad1e 100644
--- a/rpki/rpkid_tasks.py
+++ b/rpki/rpkid_tasks.py
@@ -87,11 +87,6 @@ class AbstractTask(object):
"""
Abstract base class for rpkid scheduler task objects. This just
handles the scheduler hooks; real work starts in self.start.
-
- NB: This assumes that the rpki.rpkid.rpkid.task_* methods have been
- rewritten to expect instances of subclasses of this class, rather
- than expecting thunks to be wrapped up in the older version of this
- class. Rewrite, rewrite, remove this comment when done, OK!
"""
## @var timeslice
@@ -100,7 +95,8 @@ class AbstractTask(object):
timeslice = rpki.sundial.timedelta(seconds = 15)
- def __init__(self, s, description = None):
+ def __init__(self, rpkid, s, description = None):
+ self.rpkid = rpkid
self.self = s
self.description = description
self.completions = []
@@ -115,19 +111,17 @@ class AbstractTask(object):
self.completions.append(completion)
def exit(self):
- self.self.gctx.sql.sweep()
while self.completions:
self.completions.pop(0)(self)
self.clear()
self.due_date = None
- self.self.gctx.task_next()
+ self.rpkid.task_next()
def postpone(self, continuation):
- self.self.gctx.sql.sweep()
self.continuation = continuation
self.due_date = None
- self.self.gctx.task_add(self)
- self.self.gctx.task_next()
+ self.rpkid.task_add(self)
+ self.rpkid.task_next()
def __call__(self):
self.due_date = rpki.sundial.now() + self.timeslice
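A minimal sketch of a concrete task under the revised base class (ExampleTask is hypothetical; the real tasks follow in this file):

    class ExampleTask(AbstractTask):
        # The constructor receives the rpkid instance plus the self object (s);
        # clear() resets per-run state, start() does the work and must finish,
        # possibly from a callback, by calling self.exit().

        def clear(self):
            self.started = False

        def start(self):
            self.rpkid.checkpoint()
            assert not self.started
            self.started = True
            publisher = rpki.rpkid.publication_queue(self.rpkid)
            # ... queue work for objects belonging to self.self here ...
            publisher.call_pubd(self.exit, self.failed)

        def failed(self, e):
            logger.exception("ExampleTask failed for %s, skipping", self.self_handle)
            self.rpkid.checkpoint()
            self.exit()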
@@ -163,58 +157,75 @@ class PollParentTask(AbstractTask):
"""
def clear(self):
+ logger.debug("PollParentTask.clear()")
self.parent_iterator = None
self.parent = None
self.ca_map = None
self.class_iterator = None
+ self.started = False
def start(self):
- self.gctx.checkpoint()
- logger.debug("Self %s[%d] polling parents", self.self_handle, self.self_id)
- rpki.async.iterator(self.parents, self.parent_loop, self.exit)
+ logger.debug("PollParentTask.start()")
+ self.rpkid.checkpoint()
+ logger.debug("Self %s[%r] polling parents", self.self_handle, self)
+ assert not self.started
+ self.started = True
+ #
+ # XXX Apparently "self" is a //really// bad choice for a column name with Django
+ #
+ rpki.async.iterator(rpki.rpkidb.models.Parent.objects.filter(self__exact = self.self), self.parent_loop, self.exit)
def parent_loop(self, parent_iterator, parent):
+ logger.debug("PollParentTask.parent_loop()")
self.parent_iterator = parent_iterator
self.parent = parent
- parent.up_down_list_query(self.got_list, self.list_failed)
+ parent.up_down_list_query(rpkid = self.rpkid, cb = self.got_list, eb = self.list_failed)
def got_list(self, r_msg):
- self.ca_map = dict((ca.parent_resource_class, ca) for ca in self.parent.cas)
- self.gctx.checkpoint()
+ logger.debug("PollParentTask.got_list()")
+ self.ca_map = dict((ca.parent_resource_class, ca) for ca in self.parent.cas.all())
+ self.rpkid.checkpoint()
rpki.async.iterator(r_msg.getiterator(rpki.up_down.tag_class), self.class_loop, self.class_done)
def list_failed(self, e):
+ logger.debug("PollParentTask.list_failed()")
logger.exception("Couldn't get resource class list from parent %r, skipping", self.parent)
self.parent_iterator()
def class_loop(self, class_iterator, rc):
- self.gctx.checkpoint()
+ logger.debug("PollParentTask.class_loop()")
+ self.rpkid.checkpoint()
self.class_iterator = class_iterator
try:
ca = self.ca_map.pop(rc.get("class_name"))
except KeyError:
- rpki.rpkid.ca_obj.create(self.parent, rc, class_iterator, self.class_create_failed)
+ rpki.rpkidb.models.CA.create(rpkid = self.rpkid, parent = self.parent, rc = rc,
+ cb = class_iterator, eb = self.class_create_failed)
else:
- ca.check_for_updates(self.parent, rc, class_iterator, self.class_update_failed)
+ ca.check_for_updates(rpkid = self.rpkid, parent = self.parent, rc = rc, cb = class_iterator, eb = self.class_update_failed)
def class_update_failed(self, e):
+ logger.debug("PollParentTask.class_update_failed()")
logger.exception("Couldn't update class, skipping")
self.class_iterator()
def class_create_failed(self, e):
+ logger.debug("PollParentTask.class_create_failed()")
logger.exception("Couldn't create class, skipping")
self.class_iterator()
def class_done(self):
+ logger.debug("PollParentTask.class_done()")
rpki.async.iterator(self.ca_map.values(), self.ca_loop, self.ca_done)
def ca_loop(self, iterator, ca):
- self.gctx.checkpoint()
+ logger.debug("PollParentTask.ca_loop()")
+ self.rpkid.checkpoint()
ca.destroy(self.parent, iterator)
def ca_done(self):
- self.gctx.checkpoint()
- self.gctx.sql.sweep()
+ logger.debug("PollParentTask.ca_done()")
+ self.rpkid.checkpoint()
self.parent_iterator()
@@ -233,18 +244,23 @@ class UpdateChildrenTask(AbstractTask):
self.iterator = None
self.child = None
self.child_certs = None
+ self.started = False
def start(self):
- self.gctx.checkpoint()
- logger.debug("Self %s[%d] updating children", self.self_handle, self.self_id)
+ self.rpkid.checkpoint()
+ logger.debug("Self %s[%r] updating children", self.self_handle, self)
+ assert not self.started
+ self.started = True
self.now = rpki.sundial.now()
self.rsn = self.now + rpki.sundial.timedelta(seconds = self.regen_margin)
- self.publisher = rpki.rpkid.publication_queue()
- rpki.async.iterator(self.children, self.loop, self.done)
+ self.publisher = rpki.rpkid.publication_queue(self.rpkid)
+ #
+ # XXX Apparently "self" is a //really// bad choice for a column name with Django
+ #
+ rpki.async.iterator(rpki.rpkidb.models.Child.objects.filter(self__exact = self.self), self.loop, self.done)
def loop(self, iterator, child):
- self.gctx.checkpoint()
- self.gctx.sql.sweep()
+ self.rpkid.checkpoint()
self.iterator = iterator
self.child = child
self.child_certs = child.child_certs
@@ -255,7 +271,7 @@ class UpdateChildrenTask(AbstractTask):
def do_child(self):
if self.child_certs:
- self.gctx.irdb_query_child_resources(self.child.self.self_handle, self.child.child_handle,
+ self.rpkid.irdb_query_child_resources(self.child.self.self_handle, self.child.child_handle,
self.got_resources, self.lose)
else:
self.iterator()
@@ -266,74 +282,70 @@ class UpdateChildrenTask(AbstractTask):
def got_resources(self, irdb_resources):
try:
- for child_cert in self.child_certs:
+ for child_cert in self.child_certs.filter(ca_detail__state = "active"):
ca_detail = child_cert.ca_detail
- ca = ca_detail.ca
- if ca_detail.state == "active":
- old_resources = child_cert.cert.get_3779resources()
- new_resources = old_resources & irdb_resources & ca_detail.latest_ca_cert.get_3779resources()
- old_aia = child_cert.cert.get_AIA()[0]
- new_aia = ca_detail.ca_cert_uri
-
- if new_resources.empty():
- logger.debug("Resources shrank to the null set, revoking and withdrawing child %s certificate SKI %s",
- self.child.child_handle, child_cert.cert.gSKI())
- child_cert.revoke(publisher = self.publisher)
- ca_detail.generate_crl(publisher = self.publisher)
- ca_detail.generate_manifest(publisher = self.publisher)
-
- elif (old_resources != new_resources or
- old_aia != new_aia or
- (old_resources.valid_until < self.rsn and
- irdb_resources.valid_until > self.now and
- old_resources.valid_until != irdb_resources.valid_until)):
-
- logger.debug("Need to reissue child %s certificate SKI %s",
- self.child.child_handle, child_cert.cert.gSKI())
- if old_resources != new_resources:
- logger.debug("Child %s SKI %s resources changed: old %s new %s",
- self.child.child_handle, child_cert.cert.gSKI(),
- old_resources, new_resources)
- if old_resources.valid_until != irdb_resources.valid_until:
- logger.debug("Child %s SKI %s validity changed: old %s new %s",
- self.child.child_handle, child_cert.cert.gSKI(),
- old_resources.valid_until, irdb_resources.valid_until)
-
- new_resources.valid_until = irdb_resources.valid_until
- child_cert.reissue(
- ca_detail = ca_detail,
- resources = new_resources,
- publisher = self.publisher)
-
- elif old_resources.valid_until < self.now:
- logger.debug("Child %s certificate SKI %s has expired: cert.valid_until %s, irdb.valid_until %s",
+ old_resources = child_cert.cert.get_3779resources()
+ new_resources = old_resources & irdb_resources & ca_detail.latest_ca_cert.get_3779resources()
+ old_aia = child_cert.cert.get_AIA()[0]
+ new_aia = ca_detail.ca_cert_uri
+
+ if new_resources.empty():
+ logger.debug("Resources shrank to the null set, revoking and withdrawing child %s certificate SKI %s",
+ self.child.child_handle, child_cert.cert.gSKI())
+ child_cert.revoke(publisher = self.publisher)
+ ca_detail.generate_crl(publisher = self.publisher)
+ ca_detail.generate_manifest(publisher = self.publisher)
+
+ elif (old_resources != new_resources or
+ old_aia != new_aia or
+ (old_resources.valid_until < self.rsn and
+ irdb_resources.valid_until > self.now and
+ old_resources.valid_until != irdb_resources.valid_until)):
+
+ logger.debug("Need to reissue child %s certificate SKI %s",
+ self.child.child_handle, child_cert.cert.gSKI())
+ if old_resources != new_resources:
+ logger.debug("Child %s SKI %s resources changed: old %s new %s",
+ self.child.child_handle, child_cert.cert.gSKI(),
+ old_resources, new_resources)
+ if old_resources.valid_until != irdb_resources.valid_until:
+ logger.debug("Child %s SKI %s validity changed: old %s new %s",
self.child.child_handle, child_cert.cert.gSKI(),
old_resources.valid_until, irdb_resources.valid_until)
- child_cert.sql_delete()
- self.publisher.queue(
- uri = child_cert.uri,
- old_obj = child_cert.cert,
- repository = ca.parent.repository)
- ca_detail.generate_manifest(publisher = self.publisher)
+
+ new_resources.valid_until = irdb_resources.valid_until
+ child_cert.reissue(
+ ca_detail = ca_detail,
+ resources = new_resources,
+ publisher = self.publisher)
+
+ elif old_resources.valid_until < self.now:
+ logger.debug("Child %s certificate SKI %s has expired: cert.valid_until %s, irdb.valid_until %s",
+ self.child.child_handle, child_cert.cert.gSKI(),
+ old_resources.valid_until, irdb_resources.valid_until)
+ child_cert.delete()
+ self.publisher.queue(
+ uri = child_cert.uri,
+ old_obj = child_cert.cert,
+ repository = ca_detail.ca.parent.repository)
+ ca_detail.generate_manifest(publisher = self.publisher)
except (SystemExit, rpki.async.ExitNow):
raise
except Exception, e:
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
self.lose(e)
else:
- self.gctx.checkpoint()
- self.gctx.sql.sweep()
+ self.rpkid.checkpoint()
self.iterator()
def done(self):
- self.gctx.checkpoint()
- self.gctx.sql.sweep()
+ self.rpkid.checkpoint()
self.publisher.call_pubd(self.exit, self.publication_failed)
def publication_failed(self, e):
logger.exception("Couldn't publish for %s, skipping", self.self_handle)
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
self.exit()
@@ -349,42 +361,55 @@ class UpdateROAsTask(AbstractTask):
self.publisher = None
self.ca_details = None
self.count = None
+ self.started = False
def start(self):
- self.gctx.checkpoint()
- self.gctx.sql.sweep()
- logger.debug("Self %s[%d] updating ROAs", self.self_handle, self.self_id)
-
+ self.rpkid.checkpoint()
+ logger.debug("Self %s[%r] updating ROAs", self.self_handle, self)
+ assert not self.started
+ self.started = True
logger.debug("Issuing query for ROA requests")
- self.gctx.irdb_query_roa_requests(self.self_handle, self.got_roa_requests, self.roa_requests_failed)
+ self.rpkid.irdb_query_roa_requests(self.self_handle, self.got_roa_requests, self.roa_requests_failed)
def got_roa_requests(self, r_msg):
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
logger.debug("Received response to query for ROA requests")
- if self.gctx.sql.dirty:
- logger.warning("Unexpected dirty SQL cache, flushing")
- self.gctx.sql.sweep()
-
roas = {}
seen = set()
self.orphans = []
self.updates = []
- self.publisher = rpki.rpkid.publication_queue()
+ self.publisher = rpki.rpkid.publication_queue(self.rpkid)
self.ca_details = set()
- for roa in self.roas:
+ logger.debug("UpdateROAsTask.got_roa_requests(): setup done, self.orphans %r", self.orphans)
+ assert isinstance(self.orphans, list) # XXX
+
+ for roa in rpki.rpkidb.models.ROA.objects.filter(self__exact = self.self): # XXX
+ logger.debug("UpdateROAsTask.got_roa_requests(): roa loop, self.orphans %r", self.orphans)
+ assert isinstance(self.orphans, list) # XXX
k = (roa.asn, str(roa.ipv4), str(roa.ipv6))
if k not in roas:
roas[k] = roa
- elif (roa.roa is not None and roa.cert is not None and roa.ca_detail is not None and roa.ca_detail.state == "active" and
- (roas[k].roa is None or roas[k].cert is None or roas[k].ca_detail is None or roas[k].ca_detail.state != "active")):
+ elif (roa.roa is not None and
+ roa.cert is not None and
+ roa.ca_detail is not None and
+ roa.ca_detail.state == "active" and
+ (roas[k].roa is None or
+ roas[k].cert is None or
+ roas[k].ca_detail is None or
+ roas[k].ca_detail.state != "active")):
self.orphans.append(roas[k])
roas[k] = roa
else:
self.orphans.append(roa)
+ logger.debug("UpdateROAsTask.got_roa_requests(): roa loop done, self.orphans %r", self.orphans)
+ assert isinstance(self.orphans, list) # XXX
+
for r_pdu in r_msg:
+ logger.debug("UpdateROAsTask.got_roa_requests(): r_pdu loop, self.orphans %r", self.orphans)
+ assert isinstance(self.orphans, list)
k = (r_pdu.get("asn"), r_pdu.get("ipv4"), r_pdu.get("ipv6"))
if k in seen:
logger.warning("Skipping duplicate ROA request %r", r_pdu)
@@ -392,14 +417,16 @@ class UpdateROAsTask(AbstractTask):
seen.add(k)
roa = roas.pop(k, None)
if roa is None:
- roa = rpki.rpkid.roa_obj(self.gctx, self.self_id, long(r_pdu.get("asn")),
- rpki.resource_set.roa_prefix_set_ipv4(r_pdu.get("ipv4")),
- rpki.resource_set.roa_prefix_set_ipv6(r_pdu.get("ipv6")))
+ roa = rpki.rpkidb.models.ROA(asn = long(r_pdu.get("asn")), ipv4 = r_pdu.get("ipv4"), ipv6 = r_pdu.get("ipv6"))
+ roa.self = self.self
logger.debug("Created new %r", roa)
else:
logger.debug("Found existing %r", roa)
self.updates.append(roa)
+ logger.debug("UpdateROAsTask.got_roa_requests(): r_pdu loop done, self.orphans %r", self.orphans)
+ assert isinstance(self.orphans, list) # XXX
+
self.orphans.extend(roas.itervalues())
if self.overdue:
@@ -412,11 +439,10 @@ class UpdateROAsTask(AbstractTask):
rpki.async.iterator(self.updates, self.loop, self.done, pop_list = True)
def loop(self, iterator, roa):
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
try:
roa.update(publisher = self.publisher, fast = True)
self.ca_details.add(roa.ca_detail)
- self.gctx.sql.sweep()
except (SystemExit, rpki.async.ExitNow):
raise
except rpki.exceptions.NoCoveringCertForROA:
@@ -437,13 +463,12 @@ class UpdateROAsTask(AbstractTask):
logger.debug("Generating new manifest for %r", ca_detail)
ca_detail.generate_manifest(publisher = self.publisher)
self.ca_details.clear()
- self.gctx.sql.sweep()
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
self.publisher.call_pubd(done, self.publication_failed)
def publication_failed(self, e):
logger.exception("Couldn't publish for %s, skipping", self.self_handle)
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
self.exit()
def done(self):
@@ -455,8 +480,7 @@ class UpdateROAsTask(AbstractTask):
raise
except Exception:
logger.exception("Could not revoke %r", roa)
- self.gctx.sql.sweep()
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
self.publish(self.exit)
def roa_requests_failed(self, e):
@@ -476,41 +500,41 @@ class UpdateGhostbustersTask(AbstractTask):
exceptionally silly.
"""
- def start(self):
- self.gctx.checkpoint()
- logger.debug("Self %s[%d] updating Ghostbuster records",
- self.self_handle, self.self_id)
+ def clear(self):
+ self.started = False
- self.gctx.irdb_query_ghostbuster_requests(self.self_handle,
- (p.parent_handle for p in self.parents),
- self.got_ghostbuster_requests,
- self.ghostbuster_requests_failed)
+ def start(self):
+ self.rpkid.checkpoint()
+ logger.debug("Self %s[%r] updating Ghostbuster records", self.self_handle, self)
+ assert not self.started
+ self.started = True
+ parent_handles = set(p.parent_handle for p in rpki.rpkidb.models.Parent.objects.filter(self__exact = self.self))
+ self.rpkid.irdb_query_ghostbuster_requests(self.self_handle, parent_handles,
+ self.got_ghostbuster_requests,
+ self.ghostbuster_requests_failed)
def got_ghostbuster_requests(self, r_msg):
try:
- self.gctx.checkpoint()
- if self.gctx.sql.dirty:
- logger.warning("Unexpected dirty SQL cache, flushing")
- self.gctx.sql.sweep()
+ self.rpkid.checkpoint()
ghostbusters = {}
orphans = []
- publisher = rpki.rpkid.publication_queue()
+ publisher = rpki.rpkid.publication_queue(self.rpkid)
ca_details = set()
seen = set()
- parents = dict((p.parent_handle, p) for p in self.parents)
-
- for ghostbuster in self.ghostbusters:
- k = (ghostbuster.ca_detail_id, ghostbuster.vcard)
+ for ghostbuster in rpki.rpkidb.models.Ghostbuster.objects.filter(self__exact = self.self):
+ k = (ghostbuster.ca_detail.pk, ghostbuster.vcard)
if ghostbuster.ca_detail.state != "active" or k in ghostbusters:
orphans.append(ghostbuster)
else:
ghostbusters[k] = ghostbuster
for r_pdu in r_msg:
- if r_pdu.get("parent_handle") not in parents:
+ try:
+ rpki.rpkidb.models.Parent.objects.get(self__exact = self.self, parent_handle = r_pdu.get("parent_handle"))
+ except rpki.rpkidb.models.Parent.DoesNotExist:
logger.warning("Unknown parent_handle %r in Ghostbuster request, skipping", r_pdu.get("parent_handle"))
continue
k = (r_pdu.get("parent_handle"), r_pdu.text)
@@ -518,17 +542,17 @@ class UpdateGhostbustersTask(AbstractTask):
logger.warning("Skipping duplicate Ghostbuster request %r", r_pdu)
continue
seen.add(k)
- for ca in parents[r_pdu.get("parent_handle")].cas:
- ca_detail = ca.active_ca_detail
- if ca_detail is not None:
- ghostbuster = ghostbusters.pop((ca_detail.ca_detail_id, r_pdu.text), None)
- if ghostbuster is None:
- ghostbuster = rpki.rpkid.ghostbuster_obj(self.gctx, self.self_id, ca_detail.ca_detail_id, r_pdu.text)
- logger.debug("Created new %r for %r", ghostbuster, r_pdu.get("parent_handle"))
- else:
- logger.debug("Found existing %r for %s", ghostbuster, r_pdu.get("parent_handle"))
- ghostbuster.update(publisher = publisher, fast = True)
- ca_details.add(ca_detail)
+ for ca_detail in rpki.rpkidb.models.CADetail.objects.filter(ca__parent__parent_handle = r_pdu.get("parent_handle"),
+ ca__parent__self = self.self, state = "active"):
+ ghostbuster = ghostbusters.pop((ca_detail.pk, r_pdu.text), None)
+ if ghostbuster is None:
+ ghostbuster = rpki.rpkidb.models.Ghostbuster(ca_detail = ca_detail, vcard = r_pdu.text)
+ ghostbuster.self = self.self
+ logger.debug("Created new %r for %r", ghostbuster, r_pdu.get("parent_handle"))
+ else:
+ logger.debug("Found existing %r for %s", ghostbuster, r_pdu.get("parent_handle"))
+ ghostbuster.update(publisher = publisher, fast = True)
+ ca_details.add(ca_detail)
orphans.extend(ghostbusters.itervalues())
for ghostbuster in orphans:
@@ -539,9 +563,7 @@ class UpdateGhostbustersTask(AbstractTask):
ca_detail.generate_crl(publisher = publisher)
ca_detail.generate_manifest(publisher = publisher)
- self.gctx.sql.sweep()
-
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
publisher.call_pubd(self.exit, self.publication_failed)
except (SystemExit, rpki.async.ExitNow):
@@ -552,7 +574,7 @@ class UpdateGhostbustersTask(AbstractTask):
def publication_failed(self, e):
logger.exception("Couldn't publish Ghostbuster updates for %s, skipping", self.self_handle)
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
self.exit()
def ghostbuster_requests_failed(self, e):
@@ -569,26 +591,27 @@ class UpdateEECertificatesTask(AbstractTask):
so keeping it simple for initial version, we can optimize later.
"""
- def start(self):
- self.gctx.checkpoint()
- logger.debug("Self %s[%d] updating EE certificates", self.self_handle, self.self_id)
+ def clear(self):
+ self.started = False
- self.gctx.irdb_query_ee_certificate_requests(self.self_handle,
+ def start(self):
+ self.rpkid.checkpoint()
+ logger.debug("Self %s[%r] updating EE certificates", self.self_handle, self)
+ assert not self.started
+ self.started = True
+ self.rpkid.irdb_query_ee_certificate_requests(self.self_handle,
self.got_requests,
self.get_requests_failed)
def got_requests(self, r_msg):
try:
- self.gctx.checkpoint()
- if self.gctx.sql.dirty:
- logger.warning("Unexpected dirty SQL cache, flushing")
- self.gctx.sql.sweep()
+ self.rpkid.checkpoint()
- publisher = rpki.rpkid.publication_queue()
+ publisher = rpki.rpkid.publication_queue(self.rpkid)
existing = dict()
- for ee in self.ee_certificates:
+ for ee in rpki.rpkidb.models.EECertificate.objects.filter(self__exact = self.self): # XXX
gski = ee.gski
if gski not in existing:
existing[gski] = set()
@@ -626,7 +649,7 @@ class UpdateEECertificatesTask(AbstractTask):
for ca_detail in covering:
logger.debug("No existing EE certificate for %s %s",
gski, resources)
- rpki.rpkid.ee_cert_obj.create(
+ rpki.rpkidb.models.EECertificate.create( # sic: class method, not Django manager method (for now, anyway)
ca_detail = ca_detail,
subject_name = subject_name,
subject_key = subject_key,
@@ -640,15 +663,11 @@ class UpdateEECertificatesTask(AbstractTask):
ca_details.add(ee.ca_detail)
ee.revoke(publisher = publisher)
- self.gctx.sql.sweep()
-
for ca_detail in ca_details:
ca_detail.generate_crl(publisher = publisher)
ca_detail.generate_manifest(publisher = publisher)
- self.gctx.sql.sweep()
-
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
publisher.call_pubd(self.exit, self.publication_failed)
except (SystemExit, rpki.async.ExitNow):
@@ -659,7 +678,7 @@ class UpdateEECertificatesTask(AbstractTask):
def publication_failed(self, e):
logger.exception("Couldn't publish EE certificate updates for %s, skipping", self.self_handle)
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
self.exit()
def get_requests_failed(self, e):
@@ -680,38 +699,48 @@ class RegenerateCRLsAndManifestsTask(AbstractTask):
database anyway.
"""
- def start(self):
- self.gctx.checkpoint()
- logger.debug("Self %s[%d] regenerating CRLs and manifests",
- self.self_handle, self.self_id)
+ def clear(self):
+ self.started = False
+ def start(self):
+ self.rpkid.checkpoint()
+ logger.debug("Self %s[%r] regenerating CRLs and manifests", self.self_handle, self)
+ assert not self.started
+ self.started = True
now = rpki.sundial.now()
crl_interval = rpki.sundial.timedelta(seconds = self.crl_interval)
- regen_margin = max(self.gctx.cron_period * 2, crl_interval / 4)
- publisher = rpki.rpkid.publication_queue()
+ regen_margin = max(self.rpkid.cron_period * 2, crl_interval / 4)
+ publisher = rpki.rpkid.publication_queue(self.rpkid)
- for parent in self.parents:
- for ca in parent.cas:
- try:
- for ca_detail in ca.revoked_ca_details:
- if now > ca_detail.latest_crl.getNextUpdate():
- ca_detail.destroy(ca = ca, publisher = publisher)
- for ca_detail in ca.active_or_deprecated_ca_details:
- if now + regen_margin > ca_detail.latest_crl.getNextUpdate():
- ca_detail.generate_crl(publisher = publisher)
- ca_detail.generate_manifest(publisher = publisher)
- except (SystemExit, rpki.async.ExitNow):
- raise
- except Exception:
- logger.exception("Couldn't regenerate CRLs and manifests for CA %r, skipping", ca)
-
- self.gctx.checkpoint()
- self.gctx.sql.sweep()
- publisher.call_pubd(self.exit, self.lose)
+ logger.debug("RegenerateCRLsAndManifestsTask: setup complete") # XXX
+
+ for ca in rpki.rpkidb.models.CA.objects.filter(parent__self = self.self):
+ logger.debug("RegenerateCRLsAndManifestsTask: checking CA %r", ca) # XXX
+ try:
+ for ca_detail in ca.ca_details.filter(state = "revoked"):
+ if now > ca_detail.latest_crl.getNextUpdate():
+ ca_detail.destroy(ca = ca, publisher = publisher)
+ for ca_detail in ca.ca_details.filter(state__in = ("active", "deprecated")):
+ if now + regen_margin > ca_detail.latest_crl.getNextUpdate():
+ ca_detail.generate_crl(publisher = publisher)
+ ca_detail.generate_manifest(publisher = publisher)
+ except (SystemExit, rpki.async.ExitNow):
+ raise
+ except Exception:
+ logger.exception("Couldn't regenerate CRLs and manifests for CA %r, skipping", ca)
+
+ logger.debug("RegenerateCRLsAndManifestsTask: CA loop complete") # XXX
+
+ self.rpkid.checkpoint()
+ publisher.call_pubd(self.done, self.lose)
+
+ def done(self):
+ logger.debug("RegenerateCRLsAndManifestsTask: publication complete") # XXX
+ self.exit()
def lose(self, e):
logger.exception("Couldn't publish updated CRLs and manifests for self %r, skipping", self.self_handle)
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
self.exit()
@@ -722,18 +751,24 @@ class CheckFailedPublication(AbstractTask):
to pubd being down or unreachable).
"""
+ def clear(self):
+ self.started = False
+
def start(self):
- publisher = rpki.rpkid.publication_queue()
- for parent in self.parents:
- for ca in parent.cas:
- ca_detail = ca.active_ca_detail
- if ca_detail is not None:
- ca_detail.check_failed_publication(publisher)
- self.gctx.checkpoint()
- self.gctx.sql.sweep()
- publisher.call_pubd(self.exit, self.publication_failed)
+ assert not self.started
+ logger.debug("CheckFailedPublication starting")
+ self.started = True
+ publisher = rpki.rpkid.publication_queue(self.rpkid)
+ for ca_detail in rpki.rpkidb.models.CADetail.objects.filter(ca__parent__self = self.self, state = "active"):
+ ca_detail.check_failed_publication(publisher)
+ self.rpkid.checkpoint()
+ publisher.call_pubd(self.done, self.publication_failed)
def publication_failed(self, e):
logger.exception("Couldn't publish for %s, skipping", self.self_handle)
- self.gctx.checkpoint()
+ self.rpkid.checkpoint()
+ self.exit()
+
+ def done(self):
+ logger.debug("CheckFailedPublication finished")
self.exit()
diff --git a/rpki/rpkidb/migrations/0003_auto_20151018_1600.py b/rpki/rpkidb/migrations/0003_auto_20151018_1600.py
new file mode 100644
index 00000000..c730ab1a
--- /dev/null
+++ b/rpki/rpkidb/migrations/0003_auto_20151018_1600.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import rpki.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('rpkidb', '0002_auto_20151015_2213'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='bsc',
+ name='hash_alg',
+ field=rpki.fields.EnumField(default='sha256', choices=[(1, 'sha256')]),
+ ),
+ migrations.AlterField(
+ model_name='cadetail',
+ name='manifest_public_key',
+ field=rpki.fields.PublicKeyField(default=None, serialize=False, null=True, blank=True),
+ ),
+ migrations.AlterField(
+ model_name='cadetail',
+ name='public_key',
+ field=rpki.fields.PublicKeyField(default=None, serialize=False, null=True, blank=True),
+ ),
+ ]
diff --git a/rpki/rpkidb/migrations/0004_auto_20151018_1602.py b/rpki/rpkidb/migrations/0004_auto_20151018_1602.py
new file mode 100644
index 00000000..15942ae7
--- /dev/null
+++ b/rpki/rpkidb/migrations/0004_auto_20151018_1602.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import rpki.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('rpkidb', '0003_auto_20151018_1600'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='bsc',
+ name='private_key_id',
+ field=rpki.fields.RSAPrivateKeyField(default=None, serialize=False, blank=True),
+ ),
+ migrations.AlterField(
+ model_name='cadetail',
+ name='manifest_private_key_id',
+ field=rpki.fields.RSAPrivateKeyField(default=None, serialize=False, null=True, blank=True),
+ ),
+ migrations.AlterField(
+ model_name='cadetail',
+ name='private_key_id',
+ field=rpki.fields.RSAPrivateKeyField(default=None, serialize=False, null=True, blank=True),
+ ),
+ ]
diff --git a/rpki/rpkidb/migrations/0005_auto_20151018_1613.py b/rpki/rpkidb/migrations/0005_auto_20151018_1613.py
new file mode 100644
index 00000000..04d86ebb
--- /dev/null
+++ b/rpki/rpkidb/migrations/0005_auto_20151018_1613.py
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import rpki.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('rpkidb', '0004_auto_20151018_1602'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='EECertificate',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('ski', rpki.fields.BlobField(default=None, serialize=False, blank=True)),
+ ('cert', rpki.fields.CertificateField(default=None, serialize=False, blank=True)),
+ ('published', rpki.fields.SundialField(null=True)),
+ ('ca_detail', models.ForeignKey(related_name='ee_certificatess', to='rpkidb.CADetail')),
+ ('self', models.ForeignKey(related_name='ee_certificatess', to='rpkidb.Self')),
+ ],
+ ),
+ migrations.RemoveField(
+ model_name='eecert',
+ name='ca_detail',
+ ),
+ migrations.RemoveField(
+ model_name='eecert',
+ name='self',
+ ),
+ migrations.DeleteModel(
+ name='EECert',
+ ),
+ ]
diff --git a/rpki/rpkidb/models.py b/rpki/rpkidb/models.py
index 0a4346e0..d17b6fae 100644
--- a/rpki/rpkidb/models.py
+++ b/rpki/rpkidb/models.py
@@ -12,10 +12,11 @@ from django.db import models
import rpki.left_right
from rpki.fields import (EnumField, SundialField, BlobField,
- CertificateField, KeyField, CRLField, PKCS10Field,
+ CertificateField, RSAPrivateKeyField,
+ PublicKeyField, CRLField, PKCS10Field,
ManifestField, ROAField, GhostbusterField)
-from lxml.etree import Element, SubElement
+from lxml.etree import Element, SubElement, tostring as ElementToString
logger = logging.getLogger(__name__)
@@ -59,15 +60,17 @@ class XMLTemplate(object):
self.readonly = readonly
- def encode(self, obj, r_msg):
+ def encode(self, obj, q_pdu, r_msg):
"""
Encode an ORM object as XML.
"""
- r_pdu = SubElement(r_msg, rpki.left_right.xmlns + self.name, nsmap = rpki.left_right.nsmap)
- r_pdu.set(self.name + "_handle", getattr(obj, self.name + "_handle"))
+ r_pdu = SubElement(r_msg, rpki.left_right.xmlns + self.name, nsmap = rpki.left_right.nsmap, action = q_pdu.get("action"))
if self.name != "self":
- r_pdu.set("self_handle", getattr(obj, "self_handle"))
+ r_pdu.set("self_handle", obj.self.self_handle)
+ r_pdu.set(self.name + "_handle", getattr(obj, self.name + "_handle"))
+ if q_pdu.get("tag"):
+ r_pdu.set("tag", q_pdu.get("tag"))
for h in self.handles:
k = h.xml_template.name
v = getattr(obj, k)
@@ -84,6 +87,7 @@ class XMLTemplate(object):
v = getattr(obj, k)
if v is not None and not v.empty():
SubElement(r_pdu, rpki.left_right.xmlns + k).text = v.get_Base64()
+ logger.debug("XMLTemplate.encode(): %s", ElementToString(r_pdu))
def acknowledge(self, obj, q_pdu, r_msg):
@@ -98,13 +102,17 @@ class XMLTemplate(object):
"""
assert q_pdu.tag == rpki.left_right.xmlns + self.name
- r_pdu = SubElement(r_msg, rpki.left_right.xmlns + self.name, nsmap = rpki.left_right.nsmap)
- r_pdu.set(self.name + "_handle", getattr(obj, self.name + "_handle"))
+ action = q_pdu.get("action")
+ r_pdu = SubElement(r_msg, rpki.left_right.xmlns + self.name, nsmap = rpki.left_right.nsmap, action = action)
if self.name != "self":
- r_pdu.set("self_handle", getattr(obj, "self_handle"))
- if self.name == "bsc" and q_pdu.get("action") != "destroy" and obj.pkcs11_request is not None:
- assert not obj.pkcs11_request.empty()
- SubElement(r_pdu, rpki.left_right.xmlns + "pkcs11_request").text = obj.pkcs11_request.get_Base64()
+ r_pdu.set("self_handle", obj.self.self_handle)
+ r_pdu.set(self.name + "_handle", getattr(obj, self.name + "_handle"))
+ if q_pdu.get("tag"):
+ r_pdu.set("tag", q_pdu.get("tag"))
+ if self.name == "bsc" and action != "destroy" and obj.pkcs10_request is not None:
+ assert not obj.pkcs10_request.empty()
+ SubElement(r_pdu, rpki.left_right.xmlns + "pkcs10_request").text = obj.pkcs10_request.get_Base64()
+ logger.debug("XMLTemplate.acknowledge(): %s", ElementToString(r_pdu))
def decode(self, obj, q_pdu):
@@ -112,12 +120,13 @@ class XMLTemplate(object):
Decode XML into an ORM object.
"""
+ logger.debug("XMLTemplate.decode(): %r %s", obj, ElementToString(q_pdu))
assert q_pdu.tag == rpki.left_right.xmlns + self.name
for h in self.handles:
k = h.xml_template.name
v = q_pdu.get(k + "_handle")
if v is not None:
- setattr(obj, k, h.objects.get(**{k + "_handle" : v, "self" : obj.self}))
+ setattr(obj, k, h.objects.get(**{k + "_handle" : v, "self__exact" : obj.self}))
for k in self.attributes:
v = q_pdu.get(k)
if v is not None:
@@ -144,18 +153,22 @@ class XMLManager(models.Manager): # pylint: disable=W0232
holding an XMLTemplate object (above).
"""
+ # Additional complication: "self" is a bad keyword argument, which
+ # requires a two-step process.
def xml_get_or_create(self, xml):
name = self.model.xml_template.name
action = xml.get("action")
assert xml.tag == rpki.left_right.xmlns + name and action in ("create", "set")
d = { name + "_handle" : xml.get(name + "_handle") }
- if name != "self" and action == "create":
- d["self"] = Self.objects.get(self_handle = xml.get("self_handle"))
- elif name != "self":
+ if name != "self" and action != "create":
d["self__self_handle"] = xml.get("self_handle")
- return self.model(**d) if action == "create" else self.get(**d)
-
+ logger.debug("XMLManager.xml_get_or_create(): name %s action %s filter %r", name, action, d)
+ result = self.model(**d) if action == "create" else self.get(**d)
+ if name != "self" and action == "create":
+ result.self = Self.objects.get(self_handle = xml.get("self_handle"))
+ logger.debug("XMLManager.xml_get_or_create(): name %s action %s filter %r result %r", name, action, d, result)
+ return result
def xml_list(self, xml):
name = self.model.xml_template.name
@@ -166,8 +179,10 @@ class XMLManager(models.Manager): # pylint: disable=W0232
d[name + "_handle"] = xml.get(name + "_handle")
if name != "self":
d["self__self_handle"] = xml.get("self_handle")
- return self.filter(**d) if d else self.all()
-
+ logger.debug("XMLManager.xml_list(): name %s action %s filter %r", name, action, d)
+ result = self.filter(**d) if d else self.all()
+ logger.debug("XMLManager.xml_list(): name %s action %s filter %r result %r", name, action, d, result)
+ return result
def xml_get_for_delete(self, xml):
name = self.model.xml_template.name
@@ -176,10 +191,43 @@ class XMLManager(models.Manager): # pylint: disable=W0232
d = { name + "_handle" : xml.get(name + "_handle") }
if name != "self":
d["self__self_handle"] = xml.get("self_handle")
- return self.get(**d)
+ logger.debug("XMLManager.xml_get_for_delete(): name %s action %s filter %r", name, action, d)
+ result = self.get(**d)
+ logger.debug("XMLManager.xml_get_for_delete(): name %s action %s filter %r result %r", name, action, d, result)
+ return result
+
+
+def xml_hooks(cls):
+ """
+ Class decorator to add default XML hooks.
+ """
+
+ # Maybe inheritance from an abstract model would work here. Then
+ # again, maybe we could use this decorator to do something prettier
+ # for the XMLTemplate setup. Whatever. Clean up once basic stuff
+ # works again after transition from pre-Django SQL.
+
+ def default_xml_post_save_hook(self, rpkid, q_pdu, cb, eb):
+ logger.debug("default_xml_post_save_hook()")
+ cb()
+ def default_xml_pre_delete_hook(self, rpkid, cb, eb):
+ logger.debug("default_xml_pre_delete_hook()")
+ cb()
+ def default_xml_pre_save_hook(self, q_pdu):
+ logger.debug("default_xml_pre_save_hook()")
+ pass # pylint: disable=W0107
+ for name, method in (("xml_post_save_hook", default_xml_post_save_hook),
+ ("xml_pre_delete_hook", default_xml_pre_delete_hook),
+ ("xml_pre_save_hook", default_xml_pre_save_hook)):
+ if not hasattr(cls, name):
+ setattr(cls, name, method)
+
+ return cls
+
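
A standalone sketch of the decorator pattern xml_hooks() uses: defaults are attached only when the class has not defined its own hook, so a class-level override always wins (illustrative names, not part of the codebase):

    def add_defaults(cls):
        def default_greet(self):
            return "hello"
        if not hasattr(cls, "greet"):
            cls.greet = default_greet
        return cls

    @add_defaults
    class Quiet(object):
        pass

    @add_defaults
    class Loud(object):
        def greet(self):
            return "HELLO"

    assert Quiet().greet() == "hello"   # default supplied by the decorator
    assert Loud().greet() == "HELLO"    # the class's own method is untouched
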
# Models
+@xml_hooks
class Self(models.Model):
self_handle = models.SlugField(max_length = 255)
use_hsm = models.BooleanField(default = False)
@@ -196,11 +244,13 @@ class Self(models.Model):
elements = ("bpki_cert", "bpki_glue"))
- def xml_pre_delete_hook(self):
- raise NotImplementedError
+ def xml_pre_delete_hook(self, rpkid, cb, eb):
+ def loop(iterator, parent):
+ parent.destroy(iterator)
+ rpki.async.iterator(self.parents.all(), loop, cb)
- def xml_post_save_hook(self, q_pdu, cb, eb):
+ def xml_post_save_hook(self, rpkid, q_pdu, cb, eb):
if q_pdu.get("clear_replay_protection"):
for parent in self.parents.all():
parent.clear_replay_protection()
@@ -228,12 +278,12 @@ class Self(models.Model):
if q_pdu.get("run_now"):
actions.append(self.serve_run_now)
def loop(iterator, action):
- action(iterator, eb)
+ action(rpkid, iterator, eb)
rpki.async.iterator(actions, loop, cb)
- def serve_publish_world_now(self, cb, eb):
- publisher = rpki.rpkid.publication_queue()
+ def serve_publish_world_now(self, rpkid, cb, eb):
+ publisher = rpki.rpkid.publication_queue(rpkid)
repositories = set()
objects = dict()
@@ -252,7 +302,7 @@ class Self(models.Model):
assert r_pdu.get("uri") not in objects
objects[r_pdu.get("uri")] = (r_pdu.get("hash"), repository)
- repository.call_pubd(iterator, eb, q_msg, length_check = False, handlers = dict(list = list_handler))
+ repository.call_pubd(rpkid, iterator, eb, q_msg, length_check = False, handlers = dict(list = list_handler))
def reconcile(uri, obj, repository):
h, r = objects.pop(uri, (None, None))
@@ -281,21 +331,21 @@ class Self(models.Model):
rpki.async.iterator(self.parents.all(), loop, done)
- def serve_run_now(self, cb, eb):
- logger.debug("Forced immediate run of periodic actions for self %s[%d]", self.self_handle, self.self_id)
+ def serve_run_now(self, rpkid, cb, eb):
+ logger.debug("Forced immediate run of periodic actions for self %s[%r]", self.self_handle, self)
completion = rpki.rpkid_tasks.CompletionHandler(cb)
- self.schedule_cron_tasks(completion)
+ self.schedule_cron_tasks(rpkid, completion)
assert completion.count > 0
- self.gctx.task_run()
+ rpkid.task_run()
- def schedule_cron_tasks(self, completion):
+ def schedule_cron_tasks(self, rpkid, completion):
try:
tasks = self.cron_tasks
except AttributeError:
- tasks = self.cron_tasks = tuple(task(self) for task in rpki.rpkid_tasks.task_classes)
+ tasks = self.cron_tasks = tuple(task(rpkid, self) for task in rpki.rpkid_tasks.task_classes)
for task in tasks:
- self.gctx.task_add(task)
+ rpkid.task_add(task)
completion.register(task)
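
This hunk is representative of a change that runs through the rest of the file: the implicit self.gctx daemon reference is replaced by an explicit rpkid argument threaded down each call chain (publication_queue(rpkid), task_add(), task_run(), and so on). A rough sketch of the idea, using hypothetical stand-in classes rather than the real rpkid object:

    class FakeRpkid(object):
        def __init__(self):
            self.tasks = []
        def task_add(self, task):
            self.tasks.append(task)
        def task_run(self):
            while self.tasks:
                self.tasks.pop(0)()

    def schedule_cron_tasks(rpkid, tasks):
        # context is passed in, not fetched from a per-object gctx attribute
        for task in tasks:
            rpkid.task_add(task)

    rpkid = FakeRpkid()
    schedule_cron_tasks(rpkid, [lambda: None, lambda: None])
    rpkid.task_run()
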
@@ -316,11 +366,12 @@ class Self(models.Model):
if ca_detail.covers(resources))
+@xml_hooks
class BSC(models.Model):
bsc_handle = models.SlugField(max_length = 255)
- private_key_id = KeyField()
+ private_key_id = RSAPrivateKeyField()
pkcs10_request = PKCS10Field()
- hash_alg = EnumField(choices = ("sha256",))
+ hash_alg = EnumField(choices = ("sha256",), default = "sha256")
signing_cert = CertificateField(null = True)
signing_cert_crl = CRLField(null = True)
self = models.ForeignKey(Self, related_name = "bscs")
@@ -343,6 +394,7 @@ class BSC(models.Model):
self.pkcs10_request = rpki.x509.PKCS10.create(keypair = self.private_key_id)
+@xml_hooks
class Repository(models.Model):
repository_handle = models.SlugField(max_length = 255)
peer_contact_uri = models.TextField(null = True)
@@ -364,7 +416,7 @@ class Repository(models.Model):
elements = ("bpki_cert", "bpki_glue"))
- def xml_post_save_hook(self, q_pdu, cb, eb):
+ def xml_post_save_hook(self, rpkid, q_pdu, cb, eb):
if q_pdu.get("clear_replay_protection"):
self.clear_replay_protection()
cb()
@@ -375,7 +427,7 @@ class Repository(models.Model):
self.save()
- def call_pubd(self, callback, errback, q_msg, handlers = {}, length_check = True): # pylint: disable=W0102
+ def call_pubd(self, rpkid, callback, errback, q_msg, handlers = {}, length_check = True): # pylint: disable=W0102
"""
Send a message to publication daemon and return the response.
@@ -398,7 +450,7 @@ class Repository(models.Model):
bsc = self.bsc
q_der = rpki.publication.cms_msg().wrap(q_msg, bsc.private_key_id, bsc.signing_cert, bsc.signing_cert_crl)
- bpki_ta_path = (self.gctx.bpki_ta, self.self.bpki_cert, self.self.bpki_glue, self.bpki_cert, self.bpki_glue)
+ bpki_ta_path = (rpkid.bpki_ta, self.self.bpki_cert, self.self.bpki_glue, self.bpki_cert, self.bpki_glue)
def done(r_der):
try:
@@ -432,6 +484,7 @@ class Repository(models.Model):
errback(e)
+@xml_hooks
class Parent(models.Model):
parent_handle = models.SlugField(max_length = 255)
bpki_cert = CertificateField(null = True)
@@ -456,11 +509,11 @@ class Parent(models.Model):
elements = ("bpki_cert", "bpki_glue"))
- def xml_pre_delete_hook(self, cb, eb):
- self.destroy(cb, delete_parent = False)
+ def xml_pre_delete_hook(self, rpkid, cb, eb):
+ self.destroy(rpkid, cb, delete_parent = False)
- def xml_post_save_hook(self, q_pdu, cb, eb):
+ def xml_post_save_hook(self, rpkid, q_pdu, cb, eb):
if q_pdu.get("clear_replay_protection"):
self.clear_replay_protection()
actions = []
@@ -477,19 +530,19 @@ class Parent(models.Model):
rpki.async.iterator(actions, loop, cb)
- def serve_rekey(self, cb, eb):
+ def serve_rekey(self, rpkid, cb, eb):
def loop(iterator, ca):
ca.rekey(iterator, eb)
rpki.async.iterator(self.cas.all(), loop, cb)
- def serve_revoke(self, cb, eb):
+ def serve_revoke(self, rpkid, cb, eb):
def loop(iterator, ca):
ca.revoke(cb = iterator, eb = eb)
rpki.async.iterator(self.cas.all(), loop, cb)
- def serve_reissue(self, cb, eb):
+ def serve_reissue(self, rpkid, cb, eb):
def loop(iterator, ca):
ca.reissue(cb = iterator, eb = eb)
rpki.async.iterator(self.cas.all(), loop, cb)
@@ -500,7 +553,7 @@ class Parent(models.Model):
self.save()
- def get_skis(self, cb, eb):
+ def get_skis(self, rpkid, cb, eb):
"""
Fetch SKIs that this parent thinks we have. In theory this should
agree with our own database, but in practice stuff can happen, so
@@ -515,10 +568,10 @@ class Parent(models.Model):
set(rpki.x509.X509(Base64 = c.text).gSKI()
for c in rc.getiterator(rpki.up_down.tag_certificate)))
for rc in r_msg.getiterator(rpki.up_down.tag_class)))
- self.up_down_list_query(done, eb)
+ self.up_down_list_query(rpkid = rpkid, cb = done, eb = eb)
- def revoke_skis(self, rc_name, skis_to_revoke, cb, eb):
+ def revoke_skis(self, rpkid, rc_name, skis_to_revoke, cb, eb):
"""
Revoke a set of SKIs within a particular resource class.
"""
@@ -527,11 +580,11 @@ class Parent(models.Model):
def revoked(r_pdu):
iterator()
logger.debug("Asking parent %r to revoke class %r, SKI %s", self, rc_name, ski)
- self.up_down_revoke_query(rc_name, ski, revoked, eb)
+ self.up_down_revoke_query(rpkid = rpkid, class_name = rc_name, ski = ski, cb = revoked, eb = eb)
rpki.async.iterator(skis_to_revoke, loop, cb)
- def serve_revoke_forgotten(self, cb, eb):
+ def serve_revoke_forgotten(self, rpkid, cb, eb):
"""
Handle a left-right revoke_forgotten action for this parent.
@@ -551,13 +604,13 @@ class Parent(models.Model):
if rc_name in ca_map:
for ca_detail in ca_map[rc_name].issue_response_candidate_ca_details:
skis_to_revoke.discard(ca_detail.latest_ca_cert.gSKI())
- self.revoke_skis(rc_name, skis_to_revoke, iterator, eb)
+ self.revoke_skis(rpkid, rc_name, skis_to_revoke, iterator, eb)
ca_map = dict((ca.parent_resource_class, ca) for ca in self.cas.all())
rpki.async.iterator(skis_from_parent.items(), loop, cb)
- self.get_skis(got_skis, eb)
+ self.get_skis(rpkid, got_skis, eb)
- def destroy(self, cb, delete_parent = True):
+ def destroy(self, rpkid, cb, delete_parent = True):
"""
Delete all the CA stuff under this parent, and perhaps the parent
itself.
@@ -566,7 +619,7 @@ class Parent(models.Model):
def loop(iterator, ca):
ca.destroy(self, iterator)
def revoke():
- self.serve_revoke_forgotten(done, fail)
+ self.serve_revoke_forgotten(rpkid, done, fail)
def fail(e):
logger.warning("Trouble getting parent to revoke certificates, blundering onwards: %s", e)
done()
@@ -582,12 +635,14 @@ class Parent(models.Model):
sender = self.sender_name, recipient = self.recipient_name, type = query_type)
- def up_down_list_query(self, cb, eb):
+ def up_down_list_query(self, rpkid, cb, eb):
q_msg = self._compose_up_down_query("list")
- self.query_up_down(q_msg, cb, eb)
+ self.query_up_down(rpkid, q_msg, cb, eb)
- def up_down_issue_query(self, ca, ca_detail, cb, eb):
+ def up_down_issue_query(self, rpkid, ca, ca_detail, cb, eb):
+ logger.debug("Parent.up_down_issue_query(): caRepository %r rpkiManifest %r rpkiNotify %r",
+ ca.sia_uri, ca_detail.manifest_uri, ca.parent.repository.rrdp_notification_uri)
pkcs10 = rpki.x509.PKCS10.create(
keypair = ca_detail.private_key_id,
is_ca = True,
@@ -597,16 +652,16 @@ class Parent(models.Model):
q_msg = self._compose_up_down_query("issue")
q_pdu = SubElement(q_msg, rpki.up_down.tag_request, class_name = ca.parent_resource_class)
q_pdu.text = pkcs10.get_Base64()
- self.query_up_down(q_msg, cb, eb)
+ self.query_up_down(rpkid, q_msg, cb, eb)
- def up_down_revoke_query(self, class_name, ski, cb, eb):
+ def up_down_revoke_query(self, rpkid, class_name, ski, cb, eb):
q_msg = self._compose_up_down_query("revoke")
SubElement(q_msg, rpki.up_down.tag_key, class_name = class_name, ski = ski)
- self.query_up_down(q_msg, cb, eb)
+ self.query_up_down(rpkid, q_msg, cb, eb)
- def query_up_down(self, q_msg, cb, eb):
+ def query_up_down(self, rpkid, q_msg, cb, eb):
if self.bsc is None:
raise rpki.exceptions.BSCNotFound("Could not find BSC")
@@ -622,7 +677,7 @@ class Parent(models.Model):
def unwrap(r_der):
try:
r_cms = rpki.up_down.cms_msg(DER = r_der)
- r_msg = r_cms.unwrap((self.gctx.bpki_ta,
+ r_msg = r_cms.unwrap((rpkid.bpki_ta,
self.self.bpki_cert,
self.self.bpki_glue,
self.bpki_cert,
@@ -637,6 +692,8 @@ class Parent(models.Model):
else:
cb(r_msg)
+ logger.debug("query_up_down(): type(q_der) %r", type(q_der)) # XXX
+
rpki.http.client(
msg = q_der,
url = self.peer_contact_uri,
@@ -692,7 +749,7 @@ class CA(models.Model):
#def issue_response_candidate_ca_details(self): return self.ca_details.exclude(state = "revoked")
- def check_for_updates(self, parent, rc, cb, eb):
+ def check_for_updates(self, rpkid, parent, rc, cb, eb):
"""
Parent has signaled continued existence of a resource class we
already knew about, so we need to check for an updated
@@ -700,12 +757,12 @@ class CA(models.Model):
with the same key, etc.
"""
+ logger.debug("check_for_updates()")
sia_uri = parent.construct_sia_uri(rc)
sia_uri_changed = self.sia_uri != sia_uri
if sia_uri_changed:
logger.debug("SIA changed: was %s now %s", self.sia_uri, sia_uri)
self.sia_uri = sia_uri
- self.sql_mark_dirty()
class_name = rc.get("class_name")
rc_resources = rpki.resource_set.resource_bag(
rc.get("resource_set_as"),
@@ -723,7 +780,7 @@ class CA(models.Model):
logger.warning("SKI %s in resource class %s is in database but missing from list_response to %s from %s, "
"maybe parent certificate went away?",
ca_detail.public_key.gSKI(), class_name, parent.self.self_handle, parent.parent_handle)
- publisher = rpki.rpkid.publication_queue()
+ publisher = rpki.rpkid.publication_queue(rpkid)
ca_detail.destroy(ca = ca_detail.ca, publisher = publisher)
return publisher.call_pubd(iterator, eb)
if ca_detail.state == "active" and ca_detail.ca_cert_uri != rc_cert_uri:
@@ -743,6 +800,7 @@ class CA(models.Model):
current_resources.undersized(rc_resources) or
current_resources.oversized(rc_resources)):
return ca_detail.update(
+ rpkid = rpkid,
parent = parent,
ca = self,
rc = rc,
@@ -762,14 +820,14 @@ class CA(models.Model):
else:
logger.warning("Existing resource class %s to %s from %s with no certificates, rekeying",
class_name, parent.self.self_handle, parent.parent_handle)
- self.rekey(cb, eb)
+ self.rekey(rpkid, cb, eb)
# Called from exactly one place, in rpki.rpkid_tasks.PollParentTask.class_loop().
# Might want to refactor.
@classmethod
- def create(cls, parent, rc, cb, eb):
+ def create(cls, rpkid, parent, rc, cb, eb):
"""
Parent has signaled existence of a new resource class, so we need
to create and set up a corresponding CA object.
@@ -783,16 +841,17 @@ class CA(models.Model):
c = r_msg[0][0]
logger.debug("CA %r received certificate %s", self, c.get("cert_url"))
ca_detail.activate(
+ rpkid = rpkid,
ca = self,
cert = rpki.x509.X509(Base64 = c.text),
uri = c.get("cert_url"),
callback = cb,
errback = eb)
logger.debug("Sending issue request to %r from %r", parent, self.create)
- parent.up_down_issue_query(self, ca_detail, done, eb)
+ parent.up_down_issue_query(rpkid = rpkid, ca = self, ca_detail = ca_detail, cb = done, eb = eb)
- def destroy(self, parent, callback):
+ def destroy(self, rpkid, parent, callback):
"""
The list of current resource classes received from parent does not
include the class corresponding to this CA, so we need to delete
@@ -811,7 +870,7 @@ class CA(models.Model):
logger.debug("Deleting %r", self)
self.delete()
callback()
- publisher = rpki.rpkid.publication_queue()
+ publisher = rpki.rpkid.publication_queue(rpkid)
for ca_detail in self.ca_details.all():
ca_detail.destroy(ca = self, publisher = publisher, allow_failure = True)
publisher.call_pubd(done, lose)
@@ -847,7 +906,7 @@ class CA(models.Model):
return self.last_crl_sn
- def rekey(self, cb, eb):
+ def rekey(self, rpkid, cb, eb):
"""
Initiate a rekey operation for this CA. Generate a new keypair.
Request cert from parent using new keypair. Mark result as our
@@ -855,20 +914,27 @@ class CA(models.Model):
the new ca_detail.
"""
- old_detail = self.ca_details.get(state = "active")
- new_detail = CADetail.create(self)
+ try:
+ old_detail = self.ca_details.get(state = "active")
+ except CADetail.DoesNotExist:
+ old_detail = None
+
+ new_detail = CADetail.create(ca = self) # sic: class method, not manager function (for now, anyway)
+
def done(r_msg):
c = r_msg[0][0]
logger.debug("CA %r received certificate %s", self, c.get("cert_url"))
new_detail.activate(
+ rpkid = rpkid,
ca = self,
cert = rpki.x509.X509(Base64 = c.text),
uri = c.get("cert_url"),
predecessor = old_detail,
callback = cb,
errback = eb)
+
logger.debug("Sending issue request to %r from %r", self.parent, self.rekey)
- self.parent.up_down_issue_query(self, new_detail, done, eb)
+ self.parent.up_down_issue_query(rpkid = rpkid, ca = self, ca_detail = new_detail, cb = done, eb = eb)
def revoke(self, cb, eb, revoke_all = False):
@@ -896,13 +962,13 @@ class CA(models.Model):
class CADetail(models.Model):
- public_key = KeyField(null = True)
- private_key_id = KeyField(null = True)
+ public_key = PublicKeyField(null = True)
+ private_key_id = RSAPrivateKeyField(null = True)
latest_crl = CRLField(null = True)
crl_published = SundialField(null = True)
latest_ca_cert = CertificateField(null = True)
- manifest_private_key_id = KeyField(null = True)
- manifest_public_key = KeyField(null = True)
+ manifest_private_key_id = RSAPrivateKeyField(null = True)
+ manifest_public_key = PublicKeyField(null = True)
latest_manifest_cert = CertificateField(null = True)
latest_manifest = ManifestField(null = True)
manifest_published = SundialField(null = True)
@@ -961,12 +1027,12 @@ class CADetail(models.Model):
return target.asn <= me.asn and target.v4 <= me.v4 and target.v6 <= me.v6
- def activate(self, ca, cert, uri, callback, errback, predecessor = None):
+ def activate(self, rpkid, ca, cert, uri, callback, errback, predecessor = None):
"""
Activate this ca_detail.
"""
- publisher = rpki.rpkid.publication_queue()
+ publisher = rpki.rpkid.publication_queue(rpkid)
self.latest_ca_cert = cert
self.ca_cert_uri = uri
self.generate_manifest_cert()
@@ -1015,7 +1081,7 @@ class CADetail(models.Model):
logger.debug("Deleting %r", self)
self.delete()
- def revoke(self, cb, eb):
+ def revoke(self, rpkid, cb, eb):
"""
Request revocation of all certificates whose SKI matches the key
for this ca_detail.
@@ -1056,7 +1122,7 @@ class CADetail(models.Model):
nextUpdate = nextUpdate.later(self.latest_manifest.getNextUpdate())
if self.latest_crl is not None:
nextUpdate = nextUpdate.later(self.latest_crl.getNextUpdate())
- publisher = rpki.rpkid.publication_queue()
+ publisher = rpki.rpkid.publication_queue(rpkid)
for child_cert in self.child_certs.all():
nextUpdate = nextUpdate.later(child_cert.cert.getNotAfter())
child_cert.revoke(publisher = publisher)
@@ -1077,10 +1143,10 @@ class CADetail(models.Model):
self.save()
publisher.call_pubd(cb, eb)
logger.debug("Asking parent to revoke CA certificate %s", gski)
- parent.up_down_revoke_query(class_name, gski, parent_revoked, eb)
+ parent.up_down_revoke_query(rpkid = rpkid, class_name = class_name, ski = gski, cb = parent_revoked, eb = eb)
- def update(self, parent, ca, rc, sia_uri_changed, old_resources, callback, errback):
+ def update(self, rpkid, parent, ca, rc, sia_uri_changed, old_resources, callback, errback):
"""
Need to get a new certificate for this ca_detail and perhaps frob
children of this ca_detail.
@@ -1092,9 +1158,9 @@ class CADetail(models.Model):
cert_url = c.get("cert_url")
logger.debug("CA %r received certificate %s", self, cert_url)
if self.state == "pending":
- return self.activate(ca = ca, cert = cert, uri = cert_url, callback = callback, errback = errback)
+ return self.activate(rpkid = rpkid, ca = ca, cert = cert, uri = cert_url, callback = callback, errback = errback)
validity_changed = self.latest_ca_cert is None or self.latest_ca_cert.getNotAfter() != cert.getNotAfter()
- publisher = rpki.rpkid.publication_queue()
+ publisher = rpki.rpkid.publication_queue(rpkid)
if self.latest_ca_cert != cert:
self.latest_ca_cert = cert
self.save()
@@ -1115,7 +1181,7 @@ class CADetail(models.Model):
ghostbuster.update(publisher = publisher, fast = True)
publisher.call_pubd(callback, errback)
logger.debug("Sending issue request to %r from %r", parent, self.update)
- parent.up_down_issue_query(ca, self, issued, errback)
+ parent.up_down_issue_query(rpkid = rpkid, ca = ca, ca_detail = self, cb = issued, eb = errback)
@classmethod
@@ -1308,12 +1374,12 @@ class CADetail(models.Model):
self.save()
- def reissue(self, cb, eb):
+ def reissue(self, rpkid, cb, eb):
"""
Reissue all current certificates issued by this ca_detail.
"""
- publisher = rpki.rpkid.publication_queue()
+ publisher = rpki.rpkid.publication_queue(rpkid)
self.check_failed_publication(publisher)
for roa in self.roas.all():
roa.regenerate(publisher, fast = True)
@@ -1395,7 +1461,7 @@ class CADetail(models.Model):
new_obj = ghostbuster.ghostbuster,
repository = repository,
handler = ghostbuster.published_callback)
- for ee_cert in self.ee_certs.filter(published__isnull = False, published__lt = stale):
+ for ee_cert in self.ee_certificates.filter(published__isnull = False, published__lt = stale):
logger.debug("Retrying publication for %s", ee_cert)
publisher.queue(
uri = ee_cert.uri,
@@ -1404,6 +1470,7 @@ class CADetail(models.Model):
handler = ee_cert.published_callback)
+@xml_hooks
class Child(models.Model):
child_handle = models.SlugField(max_length = 255)
bpki_cert = CertificateField(null = True)
@@ -1422,24 +1489,24 @@ class Child(models.Model):
elements = ("bpki_cert", "bpki_glue"))
- def xml_pre_delete_hook(self, cb, eb):
- publisher = rpki.rpkid.publication_queue()
+ def xml_pre_delete_hook(self, rpkid, cb, eb):
+ publisher = rpki.rpkid.publication_queue(rpkid)
for child_cert in self.child_certs.all():
child_cert.revoke(publisher = publisher, generate_crl_and_manifest = True)
publisher.call_pubd(cb, eb)
- def xml_post_save_hook(self, q_pdu, cb, eb):
+ def xml_post_save_hook(self, rpkid, q_pdu, cb, eb):
if q_pdu.get("clear_replay_protection"):
self.clear_replay_protection()
if q_pdu.get("reissue"):
- self.serve_reissue(cb, eb)
+ self.serve_reissue(rpkid, cb, eb)
else:
cb()
- def serve_reissue(self, cb, eb):
- publisher = rpki.rpkid.publication_queue()
+ def serve_reissue(self, rpkid, cb, eb):
+ publisher = rpki.rpkid.publication_queue(rpkid)
for child_cert in self.child_certs.all():
child_cert.reissue(child_cert.ca_detail, publisher, force = True)
publisher.call_pubd(cb, eb)
@@ -1450,7 +1517,7 @@ class Child(models.Model):
self.save()
- def up_down_handle_list(self, q_msg, r_msg, callback, errback):
+ def up_down_handle_list(self, rpkid, q_msg, r_msg, callback, errback):
def got_resources(irdb_resources):
if irdb_resources.valid_until < rpki.sundial.now():
logger.debug("Child %s's resources expired %s", self.child_handle, irdb_resources.valid_until)
@@ -1473,10 +1540,10 @@ class Child(models.Model):
c.text = child_cert.cert.get_Base64()
SubElement(rc, rpki.up_down.tag_issuer).text = ca_detail.latest_ca_cert.get_Base64()
callback()
- self.gctx.irdb_query_child_resources(self.self.self_handle, self.child_handle, got_resources, errback)
+ rpkid.irdb_query_child_resources(self.self.self_handle, self.child_handle, got_resources, errback)
- def up_down_handle_issue(self, q_msg, r_msg, callback, errback):
+ def up_down_handle_issue(self, rpkid, q_msg, r_msg, callback, errback):
def got_resources(irdb_resources):
@@ -1504,12 +1571,12 @@ class Child(models.Model):
# Generate new cert or regenerate old one if necessary
- publisher = rpki.rpkid.publication_queue()
+ publisher = rpki.rpkid.publication_queue(rpkid)
try:
child_cert = self.child_certs.get(ca_detail = ca_detail, ski = req_key.get_SKI())
- except ChildCert.NotFound:
+ except ChildCert.DoesNotExist:
child_cert = ca_detail.issue(
ca = ca_detail.ca,
child = self,
@@ -1537,14 +1604,21 @@ class Child(models.Model):
class_name = req.get("class_name")
pkcs10 = rpki.x509.PKCS10(Base64 = req.text)
+
+ # XXX
+ logger.debug("Child.up_down_handle_issue(): PKCS #10 %s", pkcs10.get_Base64())
+ sia = pkcs10.get_SIA()
+ logger.debug("Child.up_down_handle_issue(): PKCS #10 SIA %r (%r, %r, %r, %r) %r",
+ type(sia), type(sia[0]), type(sia[1]), type(sia[2]), type(sia[3]), sia)
+
pkcs10.check_valid_request_ca()
ca_detail = CADetail.objects.get(ca__parent__self = self.self,
- ca__parent_class_name = class_name,
+ ca__parent_resource_class = class_name,
state = "active")
- self.gctx.irdb_query_child_resources(self.self.self_handle, self.child_handle, got_resources, errback)
+ rpkid.irdb_query_child_resources(self.self.self_handle, self.child_handle, got_resources, errback)
- def up_down_handle_revoke(self, q_msg, r_msg, callback, errback):
+ def up_down_handle_revoke(self, rpkid, q_msg, r_msg, callback, errback):
def done():
SubElement(r_msg, key.tag, class_name = class_name, ski = key.get("ski"))
callback()
@@ -1552,15 +1626,15 @@ class Child(models.Model):
assert key.tag == rpki.up_down.tag_key
class_name = key.get("class_name")
ski = base64.urlsafe_b64decode(key.get("ski") + "=")
- publisher = rpki.rpkid.publication_queue()
+ publisher = rpki.rpkid.publication_queue(rpkid)
for child_cert in ChildCert.objects.filter(ca_detail__ca__parent__self = self.self,
- ca_detail__ca__parent_class_name = class_name,
+ ca_detail__ca__parent_resource_class = class_name,
ski = ski):
child_cert.revoke(publisher = publisher)
publisher.call_pubd(done, errback)
- def serve_up_down(self, q_der, callback):
+ def serve_up_down(self, rpkid, q_der, callback):
"""
Outer layer of server handling for one up-down PDU from this child.
"""
@@ -1579,7 +1653,7 @@ class Child(models.Model):
if self.bsc is None:
raise rpki.exceptions.BSCNotFound("Could not find BSC")
q_cms = rpki.up_down.cms_msg(DER = q_der)
- q_msg = q_cms.unwrap((self.gctx.bpki_ta,
+ q_msg = q_cms.unwrap((rpkid.bpki_ta,
self.self.bpki_cert,
self.self.bpki_glue,
self.bpki_cert,
@@ -1595,7 +1669,7 @@ class Child(models.Model):
sender = q_msg.get("recipient"), recipient = q_msg.get("sender"), type = q_type + "_response")
try:
- getattr(self, "up_down_handle_" + q_type)(q_msg, r_msg, done, lose)
+ getattr(self, "up_down_handle_" + q_type)(rpkid, q_msg, r_msg, done, lose)
except (rpki.async.ExitNow, SystemExit):
raise
except Exception, e:
@@ -1719,12 +1793,12 @@ class ChildCert(models.Model):
self.save()
-class EECert(models.Model):
+class EECertificate(models.Model):
ski = BlobField()
cert = CertificateField()
published = SundialField(null = True)
- self = models.ForeignKey(Self, related_name = "ee_certs")
- ca_detail = models.ForeignKey(CADetail, related_name = "ee_certs")
+ self = models.ForeignKey(Self, related_name = "ee_certificates")
+ ca_detail = models.ForeignKey(CADetail, related_name = "ee_certificates")
@property
@@ -1733,7 +1807,7 @@ class EECert(models.Model):
Calculate g(SKI), for ease of comparison with XML.
Although, really, one has to ask why we don't just store g(SKI)
- in rpkid.sql instead of ski....
+ instead of SKI....
"""
return base64.urlsafe_b64encode(self.ski).rstrip("=")
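
The g(SKI) form used here is the URL-safe Base64 encoding of the 20-byte SKI with the trailing padding stripped; decoding therefore has to restore one "=" first, which is what up_down_handle_revoke() does above. A quick round-trip check (pure Python, no dependency on this codebase):

    import base64

    ski = "\x01" * 20                                   # stand-in 20-byte SKI
    gski = base64.urlsafe_b64encode(ski).rstrip("=")    # 27 chars, no padding
    # 20 bytes encode to 28 base64 characters including one "=" pad,
    # so exactly one "=" must be appended before decoding:
    assert base64.urlsafe_b64decode(gski + "=") == ski
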
@@ -1779,7 +1853,8 @@ class EECert(models.Model):
cn = cn,
sn = sn,
eku = eku)
- self = cls(self = ca_detail.ca.parent.self, ca_detail_id = ca_detail.ca_detail_id, cert = cert)
+ self = cls(ca_detail = ca_detail, cert = cert, ski = subject_key.get_SKI())
+ self.self = ca_detail.ca.parent.self
publisher.queue(
uri = self.uri,
new_obj = self.cert,
@@ -2135,8 +2210,15 @@ class ROA(models.Model):
v4 = rpki.resource_set.resource_set_ipv4(self.ipv4)
v6 = rpki.resource_set.resource_set_ipv6(self.ipv6)
- if self.ca_detail is not None and self.ca_detail.state == "active" and not self.ca_detail.has_expired():
- logger.debug("Keeping old ca_detail %r for ROA %r", self.ca_detail, self)
+ # http://stackoverflow.com/questions/26270042/how-do-you-catch-this-exception
+ # "Django is amazing when its not terrifying."
+ try:
+ ca_detail = self.ca_detail
+ except CADetail.DoesNotExist:
+ ca_detail = None
+
+ if ca_detail is not None and ca_detail.state == "active" and not ca_detail.has_expired():
+ logger.debug("Keeping old ca_detail %r for ROA %r", ca_detail, self)
else:
logger.debug("Searching for new ca_detail for ROA %r", self)
for ca_detail in CADetail.objects.filter(ca__parent__self = self.self, state = "active"):
@@ -2156,7 +2238,11 @@ class ROA(models.Model):
resources = resources,
subject_key = keypair.get_public(),
sia = (None, None, self.uri_from_key(keypair), self.ca_detail.ca.parent.repository.rrdp_notification_uri))
- self.roa = rpki.x509.ROA.build(self.asn, self.ipv4, self.ipv6, keypair, (self.cert,))
+ self.roa = rpki.x509.ROA.build(self.asn,
+ rpki.resource_set.roa_prefix_set_ipv4(self.ipv4),
+ rpki.resource_set.roa_prefix_set_ipv6(self.ipv6),
+ keypair,
+ (self.cert,))
self.published = rpki.sundial.now()
self.save()
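
The try/except above guards against the Django behaviour the Stack Overflow comment refers to: reading a ForeignKey attribute that has no related row raises the related model's DoesNotExist rather than returning None, so the attribute has to be normalised before the state test. A generic, runnable sketch of that catch-and-normalise pattern (not tied to Django or this schema):

    class DoesNotExist(Exception):
        pass

    class Record(object):
        @property
        def detail(self):                  # mimics an unset ForeignKey access
            try:
                return self._detail
            except AttributeError:
                raise DoesNotExist("detail not set")

    r = Record()
    try:
        detail = r.detail
    except DoesNotExist:
        detail = None                      # normalise to None, then test state
    assert detail is None
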
diff --git a/rpki/sql.py b/rpki/sql.py
deleted file mode 100644
index 55e6f7cb..00000000
--- a/rpki/sql.py
+++ /dev/null
@@ -1,472 +0,0 @@
-# $Id$
-#
-# Copyright (C) 2009-2013 Internet Systems Consortium ("ISC")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
-# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-# PERFORMANCE OF THIS SOFTWARE.
-#
-# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH
-# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-# PERFORMANCE OF THIS SOFTWARE.
-
-"""
-SQL interface code.
-"""
-
-import logging
-import weakref
-
-from rpki.mysql_import import (MySQLdb, _mysql_exceptions)
-
-import rpki.x509
-import rpki.resource_set
-import rpki.sundial
-import rpki.log
-
-logger = logging.getLogger(__name__)
-
-class session(object):
- """
- SQL session layer.
- """
-
- ## @var ping_threshold
- # Timeout after which we should issue a ping command before the real
- # one. Intent is to keep the MySQL connection alive without pinging
- # before every single command.
-
- ping_threshold = rpki.sundial.timedelta(seconds = 60)
-
- def __init__(self, cfg, autocommit = True):
-
- self.username = cfg.get("sql-username")
- self.database = cfg.get("sql-database")
- self.password = cfg.get("sql-password")
- self.autocommit = autocommit
-
- self.conv = MySQLdb.converters.conversions.copy()
- self.conv.update({
- rpki.sundial.datetime : MySQLdb.converters.DateTime2literal,
- MySQLdb.converters.FIELD_TYPE.DATETIME : rpki.sundial.datetime.DateTime_or_None })
-
- self.cache = weakref.WeakValueDictionary()
- self.dirty = set()
-
- self.connect()
-
- def connect(self):
- self.db = MySQLdb.connect(user = self.username,
- db = self.database,
- passwd = self.password,
- conv = self.conv)
- self.cur = self.db.cursor()
- self.db.autocommit(self.autocommit)
- self.timestamp = rpki.sundial.now()
-
- def close(self):
- if self.cur:
- self.cur.close()
- self.cur = None
- if self.db:
- self.db.close()
- self.db = None
-
- def _wrap_execute(self, func, query, args):
- try:
- now = rpki.sundial.now()
- if now > self.timestamp + self.ping_threshold:
- self.db.ping(True)
- self.timestamp = now
- return func(query, args)
- except _mysql_exceptions.MySQLError:
- if self.dirty:
- logger.warning("MySQL exception with dirty objects in SQL cache!")
- raise
-
- def execute(self, query, args = None):
- return self._wrap_execute(self.cur.execute, query, args)
-
- def executemany(self, query, args):
- return self._wrap_execute(self.cur.executemany, query, args)
-
- def fetchall(self):
- return self.cur.fetchall()
-
- def lastrowid(self):
- return self.cur.lastrowid
-
- def commit(self):
- """
- Sweep cache, then commit SQL.
- """
-
- self.sweep()
- logger.debug("Executing SQL COMMIT")
- self.db.commit()
-
- def rollback(self):
- """
- SQL rollback, then clear cache and dirty cache.
-
- NB: We have no way of clearing other references to cached objects,
- so if you call this method you MUST forget any state that might
- cause you to retain such references. This is probably tricky, and
- is itself a good argument for switching to something like the
- Django ORM's @commit_on_success semantics, but we do what we can.
- """
-
- logger.debug("Executing SQL ROLLBACK, discarding SQL cache and dirty set")
- self.db.rollback()
- self.dirty.clear()
- self.cache.clear()
-
- def cache_clear(self):
- """
- Clear the SQL object cache. Shouldn't be necessary now that the
- cache uses weak references, but should be harmless.
- """
-
- logger.debug("Clearing SQL cache")
- self.assert_pristine()
- self.cache.clear()
-
- def assert_pristine(self):
- """
- Assert that there are no dirty objects in the cache.
- """
-
- assert not self.dirty, "Dirty objects in SQL cache: %s" % self.dirty
-
- def sweep(self):
- """
- Write any dirty objects out to SQL.
- """
-
- for s in self.dirty.copy():
- logger.debug("Sweeping (%s) %r", "deleting" if s.sql_deleted else "storing", s)
- if s.sql_deleted:
- s.sql_delete()
- else:
- s.sql_store()
- self.assert_pristine()
-
-class template(object):
- """
- SQL template generator.
- """
-
- def __init__(self, table_name, index_column, *data_columns):
- """
- Build a SQL template.
- """
-
- type_map = dict((x[0], x[1]) for x in data_columns if isinstance(x, tuple))
- data_columns = tuple(isinstance(x, tuple) and x[0] or x for x in data_columns)
- columns = (index_column,) + data_columns
- self.table = table_name
- self.index = index_column
- self.columns = columns
- self.map = type_map
- self.select = "SELECT %s FROM %s" % (", ".join("%s.%s" % (table_name, c) for c in columns), table_name)
- self.insert = "INSERT %s (%s) VALUES (%s)" % (table_name,
- ", ".join(data_columns),
- ", ".join("%(" + s + ")s" for s in data_columns))
- self.update = "UPDATE %s SET %s WHERE %s = %%(%s)s" % (table_name,
- ", ".join(s + " = %(" + s + ")s" for s in data_columns),
- index_column,
- index_column)
- self.delete = "DELETE FROM %s WHERE %s = %%s" % (table_name, index_column)
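
For readers following this deletion: template() produced four canned statements per table from the index and data columns. Reconstructed by hand from the format strings above, template("widget", "widget_id", "name", "size") would have generated roughly the following (table and column names are made up for illustration):

    select_sql = "SELECT widget.widget_id, widget.name, widget.size FROM widget"
    insert_sql = "INSERT widget (name, size) VALUES (%(name)s, %(size)s)"
    update_sql = ("UPDATE widget SET name = %(name)s, size = %(size)s"
                  " WHERE widget_id = %(widget_id)s")
    delete_sql = "DELETE FROM widget WHERE widget_id = %s"
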
-
-class sql_persistent(object):
- """
- Mixin for persistent class that needs to be stored in SQL.
- """
-
- ## @var sql_in_db
- # Whether this object is already in SQL or not.
-
- sql_in_db = False
-
- ## @var sql_deleted
- # Whether our cached copy of this object has been deleted.
-
- sql_deleted = False
-
- ## @var sql_debug
- # Enable logging of SQL actions
-
- sql_debug = False
-
- ## @var sql_cache_debug
- # Enable debugging of SQL cache actions
-
- sql_cache_debug = False
-
- @classmethod
- def sql_fetch(cls, gctx, id): # pylint: disable=W0622
- """
- Fetch one object from SQL, based on its primary key.
-
- Since in this one case we know that the primary index is also the
- cache key, we check for a cache hit directly in the hope of
- bypassing the SQL lookup entirely.
-
- This method is usually called via a one-line class-specific
- wrapper. As a convenience, we also accept an id of None, and just
- return None in this case.
- """
-
- if id is None:
- return None
- assert isinstance(id, (int, long)), "id should be an integer, was %r" % type(id)
- key = (cls, id)
- if key in gctx.sql.cache:
- return gctx.sql.cache[key]
- else:
- return cls.sql_fetch_where1(gctx, "%s = %%s" % cls.sql_template.index, (id,))
-
- @classmethod
- def sql_fetch_where1(cls, gctx, where, args = None, also_from = None):
- """
- Fetch one object from SQL, based on an arbitrary SQL WHERE expression.
- """
-
- results = cls.sql_fetch_where(gctx, where, args, also_from)
- if len(results) == 0:
- return None
- elif len(results) == 1:
- return results[0]
- else:
- raise rpki.exceptions.DBConsistancyError(
- "Database contained multiple matches for %s where %s: %r" %
- (cls.__name__, where % tuple(repr(a) for a in args), results))
-
- @classmethod
- def sql_fetch_all(cls, gctx):
- """
- Fetch all objects of this type from SQL.
- """
-
- return cls.sql_fetch_where(gctx, None)
-
- @classmethod
- def sql_fetch_where(cls, gctx, where, args = None, also_from = None):
- """
- Fetch objects of this type matching an arbitrary SQL WHERE expression.
- """
-
- if where is None:
- assert args is None and also_from is None
- if cls.sql_debug:
- logger.debug("sql_fetch_where(%r)", cls.sql_template.select)
- gctx.sql.execute(cls.sql_template.select)
- else:
- query = cls.sql_template.select
- if also_from is not None:
- query += "," + also_from
- query += " WHERE " + where
- if cls.sql_debug:
- logger.debug("sql_fetch_where(%r, %r)", query, args)
- gctx.sql.execute(query, args)
- results = []
- for row in gctx.sql.fetchall():
- key = (cls, row[0])
- if key in gctx.sql.cache:
- results.append(gctx.sql.cache[key])
- else:
- results.append(cls.sql_init(gctx, row, key))
- return results
-
- @classmethod
- def sql_init(cls, gctx, row, key):
- """
- Initialize one Python object from the result of a SQL query.
- """
-
- self = cls()
- self.gctx = gctx
- self.sql_decode(dict(zip(cls.sql_template.columns, row)))
- gctx.sql.cache[key] = self
- self.sql_in_db = True
- self.sql_fetch_hook()
- return self
-
- def sql_mark_dirty(self):
- """
- Mark this object as needing to be written back to SQL.
- """
-
- if self.sql_cache_debug and not self.sql_is_dirty:
- logger.debug("Marking %r SQL dirty", self)
- self.gctx.sql.dirty.add(self)
-
- def sql_mark_clean(self):
- """
- Mark this object as not needing to be written back to SQL.
- """
-
- if self.sql_cache_debug and self.sql_is_dirty:
- logger.debug("Marking %r SQL clean", self)
- self.gctx.sql.dirty.discard(self)
-
- @property
- def sql_is_dirty(self):
- """
- Query whether this object needs to be written back to SQL.
- """
-
- return self in self.gctx.sql.dirty
-
- def sql_mark_deleted(self):
- """
- Mark this object as needing to be deleted in SQL.
- """
-
- self.sql_deleted = True
- self.sql_mark_dirty()
-
- def sql_store(self):
- """
- Store this object to SQL.
- """
-
- args = self.sql_encode()
- if not self.sql_in_db:
- if self.sql_debug:
- logger.debug("sql_store(%r, %r)", self.sql_template.insert, args)
- self.gctx.sql.execute(self.sql_template.insert, args)
- setattr(self, self.sql_template.index, self.gctx.sql.lastrowid())
- self.gctx.sql.cache[(self.__class__, self.gctx.sql.lastrowid())] = self
- self.sql_insert_hook()
- else:
- if self.sql_debug:
- logger.debug("sql_store(%r, %r)", self.sql_template.update, args)
- self.gctx.sql.execute(self.sql_template.update, args)
- self.sql_update_hook()
- key = (self.__class__, getattr(self, self.sql_template.index))
- assert key in self.gctx.sql.cache and self.gctx.sql.cache[key] == self
- self.sql_mark_clean()
- self.sql_in_db = True
-
- def sql_delete(self):
- """
- Delete this object from SQL.
- """
-
- if self.sql_in_db:
- id = getattr(self, self.sql_template.index) # pylint: disable=W0622
- if self.sql_debug:
- logger.debug("sql_delete(%r, %r)", self.sql_template.delete, id)
- self.sql_delete_hook()
- self.gctx.sql.execute(self.sql_template.delete, (id,))
- key = (self.__class__, id)
- if self.gctx.sql.cache.get(key) == self:
- del self.gctx.sql.cache[key]
- self.sql_in_db = False
- self.sql_mark_clean()
-
- def sql_encode(self):
- """
- Convert object attributes into a dict for use with canned SQL
- queries. This is a default version that assumes a one-to-one
- mapping between column names in SQL and attribute names in Python.
- If you need something fancier, override this.
- """
-
- d = dict((a, getattr(self, a, None)) for a in self.sql_template.columns)
- for i in self.sql_template.map:
- if d.get(i) is not None:
- d[i] = self.sql_template.map[i].to_sql(d[i])
- return d
-
- def sql_decode(self, vals):
- """
- Initialize an object with values returned by self.sql_fetch().
- This is a default version that assumes a one-to-one mapping
- between column names in SQL and attribute names in Python. If you
- need something fancier, override this.
- """
-
- for a in self.sql_template.columns:
- if vals.get(a) is not None and a in self.sql_template.map:
- setattr(self, a, self.sql_template.map[a].from_sql(vals[a]))
- else:
- setattr(self, a, vals[a])
-
- def sql_fetch_hook(self):
- """
- Customization hook.
- """
-
- pass
-
- def sql_insert_hook(self):
- """
- Customization hook.
- """
-
- pass
-
- def sql_update_hook(self):
- """
- Customization hook.
- """
-
- self.sql_delete_hook()
- self.sql_insert_hook()
-
- def sql_delete_hook(self):
- """
- Customization hook.
- """
-
- pass
-
-
-def cache_reference(func):
- """
- Decorator for use with property methods which just do an SQL lookup based on an ID.
- Check for an existing reference to the object, just return that if we find it,
- otherwise perform the SQL lookup.
-
- Not 100% certain this is a good idea, but I //think// it should work well with the
- current weak reference SQL cache, so long as we create no circular references.
- So don't do that.
- """
-
- attr_name = "_" + func.__name__
-
- def wrapped(self):
- try:
- value = getattr(self, attr_name)
- assert value is not None
- except AttributeError:
- value = func(self)
- if value is not None:
- setattr(self, attr_name, value)
- return value
-
- wrapped.__name__ = func.__name__
- wrapped.__doc__ = func.__doc__
- wrapped.__dict__.update(func.__dict__)
-
- return wrapped
diff --git a/rpki/sql_schemas.py b/rpki/sql_schemas.py
deleted file mode 100644
index 6c8c764d..00000000
--- a/rpki/sql_schemas.py
+++ /dev/null
@@ -1,335 +0,0 @@
-# Automatically generated, do not edit.
-
-## @var rpkid
-## SQL schema rpkid
-rpkid = '''-- $Id: rpkid.sql 6120 2015-10-16 04:56:43Z sra $
-
--- Copyright (C) 2012--2014 Dragon Research Labs ("DRL")
--- Portions copyright (C) 2009--2011 Internet Systems Consortium ("ISC")
--- Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN")
---
--- Permission to use, copy, modify, and distribute this software for any
--- purpose with or without fee is hereby granted, provided that the above
--- copyright notices and this permission notice appear in all copies.
---
--- THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL
--- WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
--- WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL,
--- ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
--- CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
--- OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
--- NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
--- WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
--- SQL objects needed by the RPKI engine (rpkid.py).
-
--- DROP TABLE commands must be in correct (reverse dependency) order
--- to satisfy FOREIGN KEY constraints.
-
-DROP TABLE IF EXISTS ee_cert;
-DROP TABLE IF EXISTS ghostbuster;
-DROP TABLE IF EXISTS roa_prefix;
-DROP TABLE IF EXISTS roa;
-DROP TABLE IF EXISTS revoked_cert;
-DROP TABLE IF EXISTS child_cert;
-DROP TABLE IF EXISTS child;
-DROP TABLE IF EXISTS ca_detail;
-DROP TABLE IF EXISTS ca;
-DROP TABLE IF EXISTS parent;
-DROP TABLE IF EXISTS repository;
-DROP TABLE IF EXISTS bsc;
-DROP TABLE IF EXISTS self;
-
-CREATE TABLE self (
- self_id SERIAL NOT NULL,
- self_handle VARCHAR(255) NOT NULL,
- use_hsm BOOLEAN NOT NULL DEFAULT FALSE,
- crl_interval BIGINT UNSIGNED,
- regen_margin BIGINT UNSIGNED,
- bpki_cert LONGBLOB,
- bpki_glue LONGBLOB,
- PRIMARY KEY (self_id),
- UNIQUE (self_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE bsc (
- bsc_id SERIAL NOT NULL,
- bsc_handle VARCHAR(255) NOT NULL,
- private_key_id LONGBLOB,
- pkcs10_request LONGBLOB,
- hash_alg ENUM ('sha256'),
- signing_cert LONGBLOB,
- signing_cert_crl LONGBLOB,
- self_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (bsc_id),
- CONSTRAINT bsc_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- UNIQUE (self_id, bsc_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE repository (
- repository_id SERIAL NOT NULL,
- repository_handle VARCHAR(255) NOT NULL,
- peer_contact_uri TEXT,
- rrdp_notification_uri TEXT,
- bpki_cert LONGBLOB,
- bpki_glue LONGBLOB,
- last_cms_timestamp DATETIME,
- bsc_id BIGINT UNSIGNED NOT NULL,
- self_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (repository_id),
- CONSTRAINT repository_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- CONSTRAINT repository_bsc_id
- FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE,
- UNIQUE (self_id, repository_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE parent (
- parent_id SERIAL NOT NULL,
- parent_handle VARCHAR(255) NOT NULL,
- bpki_cert LONGBLOB,
- bpki_glue LONGBLOB,
- peer_contact_uri TEXT,
- sia_base TEXT,
- sender_name TEXT,
- recipient_name TEXT,
- last_cms_timestamp DATETIME,
- self_id BIGINT UNSIGNED NOT NULL,
- bsc_id BIGINT UNSIGNED NOT NULL,
- repository_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (parent_id),
- CONSTRAINT parent_repository_id
- FOREIGN KEY (repository_id) REFERENCES repository (repository_id) ON DELETE CASCADE,
- CONSTRAINT parent_bsc_id
- FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE,
- CONSTRAINT parent_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- UNIQUE (self_id, parent_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE ca (
- ca_id SERIAL NOT NULL,
- last_crl_sn BIGINT UNSIGNED NOT NULL,
- last_manifest_sn BIGINT UNSIGNED NOT NULL,
- next_manifest_update DATETIME,
- next_crl_update DATETIME,
- last_issued_sn BIGINT UNSIGNED NOT NULL,
- sia_uri TEXT,
- parent_resource_class TEXT,
- parent_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (ca_id),
- CONSTRAINT ca_parent_id
- FOREIGN KEY (parent_id) REFERENCES parent (parent_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE ca_detail (
- ca_detail_id SERIAL NOT NULL,
- public_key LONGBLOB,
- private_key_id LONGBLOB,
- latest_crl LONGBLOB,
- crl_published DATETIME,
- latest_ca_cert LONGBLOB,
- manifest_private_key_id LONGBLOB,
- manifest_public_key LONGBLOB,
- latest_manifest_cert LONGBLOB,
- latest_manifest LONGBLOB,
- manifest_published DATETIME,
- state ENUM ('pending', 'active', 'deprecated', 'revoked') NOT NULL,
- ca_cert_uri TEXT,
- ca_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (ca_detail_id),
- CONSTRAINT ca_detail_ca_id
- FOREIGN KEY (ca_id) REFERENCES ca (ca_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE child (
- child_id SERIAL NOT NULL,
- child_handle VARCHAR(255) NOT NULL,
- bpki_cert LONGBLOB,
- bpki_glue LONGBLOB,
- last_cms_timestamp DATETIME,
- self_id BIGINT UNSIGNED NOT NULL,
- bsc_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (child_id),
- CONSTRAINT child_bsc_id
- FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE,
- CONSTRAINT child_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- UNIQUE (self_id, child_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE child_cert (
- child_cert_id SERIAL NOT NULL,
- cert LONGBLOB NOT NULL,
- published DATETIME,
- ski TINYBLOB NOT NULL,
- child_id BIGINT UNSIGNED NOT NULL,
- ca_detail_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (child_cert_id),
- CONSTRAINT child_cert_ca_detail_id
- FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE,
- CONSTRAINT child_cert_child_id
- FOREIGN KEY (child_id) REFERENCES child (child_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE revoked_cert (
- revoked_cert_id SERIAL NOT NULL,
- serial BIGINT UNSIGNED NOT NULL,
- revoked DATETIME NOT NULL,
- expires DATETIME NOT NULL,
- ca_detail_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (revoked_cert_id),
- CONSTRAINT revoked_cert_ca_detail_id
- FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE roa (
- roa_id SERIAL NOT NULL,
- asn BIGINT UNSIGNED NOT NULL,
- cert LONGBLOB NOT NULL,
- roa LONGBLOB NOT NULL,
- published DATETIME,
- self_id BIGINT UNSIGNED NOT NULL,
- ca_detail_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (roa_id),
- CONSTRAINT roa_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- CONSTRAINT roa_ca_detail_id
- FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE roa_prefix (
- prefix VARCHAR(40) NOT NULL,
- prefixlen TINYINT UNSIGNED NOT NULL,
- max_prefixlen TINYINT UNSIGNED NOT NULL,
- version TINYINT UNSIGNED NOT NULL,
- roa_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (roa_id, prefix, prefixlen, max_prefixlen),
- CONSTRAINT roa_prefix_roa_id
- FOREIGN KEY (roa_id) REFERENCES roa (roa_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE ghostbuster (
- ghostbuster_id SERIAL NOT NULL,
- vcard LONGBLOB NOT NULL,
- cert LONGBLOB NOT NULL,
- ghostbuster LONGBLOB NOT NULL,
- published DATETIME,
- self_id BIGINT UNSIGNED NOT NULL,
- ca_detail_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (ghostbuster_id),
- CONSTRAINT ghostbuster_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- CONSTRAINT ghostbuster_ca_detail_id
- FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE ee_cert (
- ee_cert_id SERIAL NOT NULL,
- ski BINARY(20) NOT NULL,
- cert LONGBLOB NOT NULL,
- published DATETIME,
- self_id BIGINT UNSIGNED NOT NULL,
- ca_detail_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (ee_cert_id),
- CONSTRAINT ee_cert_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- CONSTRAINT ee_cert_ca_detail_id
- FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
--- Local Variables:
--- indent-tabs-mode: nil
--- End:
-'''
-
-## @var pubd
-## SQL schema pubd
-pubd = '''-- $Id: pubd.sql 5914 2014-08-06 22:52:28Z sra $
-
--- Copyright (C) 2012--2014 Dragon Research Labs ("DRL")
--- Portions copyright (C) 2009--2010 Internet Systems Consortium ("ISC")
--- Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN")
---
--- Permission to use, copy, modify, and distribute this software for any
--- purpose with or without fee is hereby granted, provided that the above
--- copyright notices and this permission notice appear in all copies.
---
--- THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL
--- WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
--- WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL,
--- ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
--- CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
--- OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
--- NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
--- WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
--- SQL objects needed by pubd.py.
-
--- Old tables that should just be flushed if present at all.
-
-DROP TABLE IF EXISTS config;
-DROP TABLE IF EXISTS snapshot;
-
--- DROP TABLE commands must be in correct (reverse dependency) order
--- to satisfy FOREIGN KEY constraints.
-
-DROP TABLE IF EXISTS object;
-DROP TABLE IF EXISTS delta;
-DROP TABLE IF EXISTS session;
-DROP TABLE IF EXISTS client;
-
-CREATE TABLE client (
- client_id SERIAL NOT NULL,
- client_handle VARCHAR(255) NOT NULL,
- base_uri TEXT,
- bpki_cert LONGBLOB,
- bpki_glue LONGBLOB,
- last_cms_timestamp DATETIME,
- PRIMARY KEY (client_id),
- UNIQUE (client_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE session (
- session_id SERIAL NOT NULL,
- uuid VARCHAR(36) NOT NULL,
- serial BIGINT UNSIGNED NOT NULL,
- snapshot LONGTEXT,
- hash CHAR(64),
- PRIMARY KEY (session_id),
- UNIQUE (uuid)
-) ENGINE=InnoDB;
-
-CREATE TABLE delta (
- delta_id SERIAL NOT NULL,
- serial BIGINT UNSIGNED NOT NULL,
- xml LONGTEXT NOT NULL,
- hash CHAR(64) NOT NULL,
- expires DATETIME NOT NULL,
- session_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (delta_id),
- CONSTRAINT delta_session_id
- FOREIGN KEY (session_id) REFERENCES session (session_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE object (
- object_id SERIAL NOT NULL,
- uri VARCHAR(255) NOT NULL,
- der LONGBLOB NOT NULL,
- hash CHAR(64) NOT NULL,
- client_id BIGINT UNSIGNED NOT NULL,
- session_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (object_id),
- CONSTRAINT object_client_id
- FOREIGN KEY (client_id) REFERENCES client (client_id) ON DELETE CASCADE,
- CONSTRAINT object_session_id
- FOREIGN KEY (session_id) REFERENCES session (session_id) ON DELETE CASCADE,
- UNIQUE (session_id, hash)
-) ENGINE=InnoDB;
-
--- Local Variables:
--- indent-tabs-mode: nil
--- End:
-'''
-
diff --git a/rpki/x509.py b/rpki/x509.py
index 2d50b129..1be2f9a3 100644
--- a/rpki/x509.py
+++ b/rpki/x509.py
@@ -1067,10 +1067,10 @@ class PKCS10(DER_object):
self.check_valid_request_common()
- alg = self.get_POW().getSignatureAlgorithm()
- bc = self.get_POW().getBasicConstraints()
- eku = self.get_POW().getEKU()
- sias = self.get_POW().getSIA()
+ alg = self.get_POW().getSignatureAlgorithm()
+ bc = self.get_POW().getBasicConstraints()
+ eku = self.get_POW().getEKU()
+ sia = self.get_POW().getSIA()
if alg != rpki.oids.sha256WithRSAEncryption:
raise rpki.exceptions.BadPKCS10("PKCS #10 has bad signature algorithm for CA: %s" % alg)
@@ -1081,10 +1081,12 @@ class PKCS10(DER_object):
if eku is not None:
raise rpki.exceptions.BadPKCS10("PKCS #10 CA EKU not allowed")
- if sias is None:
+ if sia is None:
raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA missing")
- caRepository, rpkiManifest, signedObject, rpkiNotify = sias
+ caRepository, rpkiManifest, signedObject, rpkiNotify = sia
+
+ logger.debug("check_valid_request_ca(): sia: %r", sia)
if signedObject:
raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA must not have id-ad-signedObject")
@@ -1136,6 +1138,8 @@ class PKCS10(DER_object):
bc = self.get_POW().getBasicConstraints()
sia = self.get_POW().getSIA()
+ logger.debug("check_valid_request_ee(): sia: %r", sia)
+
caRepository, rpkiManifest, signedObject, rpkiNotify = sia or (None, None, None, None)
if alg not in (rpki.oids.sha256WithRSAEncryption, rpki.oids.ecdsa_with_SHA256):
@@ -1197,23 +1201,9 @@ class PKCS10(DER_object):
Create a new request for a given keypair.
"""
- assert exts is None, "Old calling sequence to rpki.x509.PKCS10.create()"
-
if cn is None:
cn = "".join(("%02X" % ord(i) for i in keypair.get_SKI()))
- if isinstance(caRepository, str):
- caRepository = (caRepository,)
-
- if isinstance(rpkiManifest, str):
- rpkiManifest = (rpkiManifest,)
-
- if isinstance(signedObject, str):
- signedObject = (signedObject,)
-
- if isinstance(rpkiNotify, str):
- rpkiNotify = (rpkiNotify,)
-
req = rpki.POW.PKCS10()
req.setVersion(0)
req.setSubject(X501DN.from_cn(cn, sn).get_POW())
@@ -1223,8 +1213,9 @@ class PKCS10(DER_object):
req.setBasicConstraints(True, None)
req.setKeyUsage(cls.expected_ca_keyUsage)
- if caRepository or rpkiManifest or signedObject or rpkiNotify:
- req.setSIA(caRepository, rpkiManifest, signedObject, rpkiNotify)
+ sia = (caRepository, rpkiManifest, signedObject, rpkiNotify)
+ if not all(s is None for s in sia):
+ req.setSIA(*tuple([str(s)] if isinstance(s, (str, unicode)) else s for s in sia))
if eku:
req.setEKU(eku)
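
The rewritten setSIA() call above normalises each SIA component: a bare string (or unicode) URI is wrapped in a one-element list, existing sequences and None pass through, and the call is skipped when all four components are None. A pure-Python sketch of just that normalisation, with no rpki.POW dependency (the function name is illustrative):

    def normalise_sia(caRepository, rpkiManifest, signedObject, rpkiNotify):
        sia = (caRepository, rpkiManifest, signedObject, rpkiNotify)
        if all(s is None for s in sia):
            return None                   # caller skips setSIA() entirely
        return tuple([str(s)] if isinstance(s, (str, unicode)) else s
                     for s in sia)

    assert normalise_sia("rsync://example.org/repo/", None, None,
                         ["https://example.org/notify.xml"]) == \
        (["rsync://example.org/repo/"], None, None,
         ["https://example.org/notify.xml"])
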
@@ -2034,7 +2025,7 @@ class XML_CMS_object(Wrapped_CMS_object):
"""
obj.last_cms_timestamp = self.check_replay(obj.last_cms_timestamp, *context)
- obj.sql_mark_dirty()
+ obj.save()
class SignedReferral(XML_CMS_object):
encoding = "us-ascii"
diff --git a/schemas/sql/pubd.sql b/schemas/sql/pubd.sql
deleted file mode 100644
index 2a0e2851..00000000
--- a/schemas/sql/pubd.sql
+++ /dev/null
@@ -1,85 +0,0 @@
--- $Id$
-
--- Copyright (C) 2012--2014 Dragon Research Labs ("DRL")
--- Portions copyright (C) 2009--2010 Internet Systems Consortium ("ISC")
--- Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN")
---
--- Permission to use, copy, modify, and distribute this software for any
--- purpose with or without fee is hereby granted, provided that the above
--- copyright notices and this permission notice appear in all copies.
---
--- THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL
--- WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
--- WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL,
--- ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
--- CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
--- OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
--- NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
--- WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
--- SQL objects needed by pubd.py.
-
--- Old tables that should just be flushed if present at all.
-
-DROP TABLE IF EXISTS config;
-DROP TABLE IF EXISTS snapshot;
-
--- DROP TABLE commands must be in correct (reverse dependency) order
--- to satisfy FOREIGN KEY constraints.
-
-DROP TABLE IF EXISTS object;
-DROP TABLE IF EXISTS delta;
-DROP TABLE IF EXISTS session;
-DROP TABLE IF EXISTS client;
-
-CREATE TABLE client (
- client_id SERIAL NOT NULL,
- client_handle VARCHAR(255) NOT NULL,
- base_uri TEXT,
- bpki_cert LONGBLOB,
- bpki_glue LONGBLOB,
- last_cms_timestamp DATETIME,
- PRIMARY KEY (client_id),
- UNIQUE (client_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE session (
- session_id SERIAL NOT NULL,
- uuid VARCHAR(36) NOT NULL,
- serial BIGINT UNSIGNED NOT NULL,
- snapshot LONGTEXT,
- hash CHAR(64),
- PRIMARY KEY (session_id),
- UNIQUE (uuid)
-) ENGINE=InnoDB;
-
-CREATE TABLE delta (
- delta_id SERIAL NOT NULL,
- serial BIGINT UNSIGNED NOT NULL,
- xml LONGTEXT NOT NULL,
- hash CHAR(64) NOT NULL,
- expires DATETIME NOT NULL,
- session_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (delta_id),
- CONSTRAINT delta_session_id
- FOREIGN KEY (session_id) REFERENCES session (session_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE object (
- object_id SERIAL NOT NULL,
- uri VARCHAR(255) NOT NULL,
- der LONGBLOB NOT NULL,
- hash CHAR(64) NOT NULL,
- client_id BIGINT UNSIGNED NOT NULL,
- session_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (object_id),
- CONSTRAINT object_client_id
- FOREIGN KEY (client_id) REFERENCES client (client_id) ON DELETE CASCADE,
- CONSTRAINT object_session_id
- FOREIGN KEY (session_id) REFERENCES session (session_id) ON DELETE CASCADE,
- UNIQUE (session_id, hash)
-) ENGINE=InnoDB;
-
--- Local Variables:
--- indent-tabs-mode: nil
--- End:
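With the hand-maintained pubd schema gone, table definitions like the one above are expressed through an ORM rather than raw SQL. Purely for comparison, a hedged sketch of how the deleted client table could be rendered as a Django model; the model name, app layout, and field spellings are illustrative assumptions, not the definitions actually used in the tree (the SERIAL primary key becomes Django's implicit auto-increment id):

    # Hypothetical ORM rendering of the dropped pubd "client" table.
    from django.db import models

    class Client(models.Model):
        client_handle      = models.CharField(max_length=255, unique=True)  # was VARCHAR(255) UNIQUE
        base_uri           = models.TextField(null=True)
        bpki_cert          = models.BinaryField(null=True)                  # was LONGBLOB
        bpki_glue          = models.BinaryField(null=True)                  # was LONGBLOB
        last_cms_timestamp = models.DateTimeField(null=True)                # was DATETIME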
diff --git a/schemas/sql/rpkid.sql b/schemas/sql/rpkid.sql
deleted file mode 100644
index 14499091..00000000
--- a/schemas/sql/rpkid.sql
+++ /dev/null
@@ -1,240 +0,0 @@
--- $Id$
-
--- Copyright (C) 2012--2014 Dragon Research Labs ("DRL")
--- Portions copyright (C) 2009--2011 Internet Systems Consortium ("ISC")
--- Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN")
---
--- Permission to use, copy, modify, and distribute this software for any
--- purpose with or without fee is hereby granted, provided that the above
--- copyright notices and this permission notice appear in all copies.
---
--- THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL
--- WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
--- WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL,
--- ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
--- CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
--- OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
--- NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
--- WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
--- SQL objects needed by the RPKI engine (rpkid.py).
-
--- DROP TABLE commands must be in correct (reverse dependency) order
--- to satisfy FOREIGN KEY constraints.
-
-DROP TABLE IF EXISTS ee_cert;
-DROP TABLE IF EXISTS ghostbuster;
-DROP TABLE IF EXISTS roa_prefix;
-DROP TABLE IF EXISTS roa;
-DROP TABLE IF EXISTS revoked_cert;
-DROP TABLE IF EXISTS child_cert;
-DROP TABLE IF EXISTS child;
-DROP TABLE IF EXISTS ca_detail;
-DROP TABLE IF EXISTS ca;
-DROP TABLE IF EXISTS parent;
-DROP TABLE IF EXISTS repository;
-DROP TABLE IF EXISTS bsc;
-DROP TABLE IF EXISTS self;
-
-CREATE TABLE self (
- self_id SERIAL NOT NULL,
- self_handle VARCHAR(255) NOT NULL,
- use_hsm BOOLEAN NOT NULL DEFAULT FALSE,
- crl_interval BIGINT UNSIGNED,
- regen_margin BIGINT UNSIGNED,
- bpki_cert LONGBLOB,
- bpki_glue LONGBLOB,
- PRIMARY KEY (self_id),
- UNIQUE (self_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE bsc (
- bsc_id SERIAL NOT NULL,
- bsc_handle VARCHAR(255) NOT NULL,
- private_key_id LONGBLOB,
- pkcs10_request LONGBLOB,
- hash_alg ENUM ('sha256'),
- signing_cert LONGBLOB,
- signing_cert_crl LONGBLOB,
- self_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (bsc_id),
- CONSTRAINT bsc_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- UNIQUE (self_id, bsc_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE repository (
- repository_id SERIAL NOT NULL,
- repository_handle VARCHAR(255) NOT NULL,
- peer_contact_uri TEXT,
- rrdp_notification_uri TEXT,
- bpki_cert LONGBLOB,
- bpki_glue LONGBLOB,
- last_cms_timestamp DATETIME,
- bsc_id BIGINT UNSIGNED NOT NULL,
- self_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (repository_id),
- CONSTRAINT repository_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- CONSTRAINT repository_bsc_id
- FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE,
- UNIQUE (self_id, repository_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE parent (
- parent_id SERIAL NOT NULL,
- parent_handle VARCHAR(255) NOT NULL,
- bpki_cert LONGBLOB,
- bpki_glue LONGBLOB,
- peer_contact_uri TEXT,
- sia_base TEXT,
- sender_name TEXT,
- recipient_name TEXT,
- last_cms_timestamp DATETIME,
- self_id BIGINT UNSIGNED NOT NULL,
- bsc_id BIGINT UNSIGNED NOT NULL,
- repository_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (parent_id),
- CONSTRAINT parent_repository_id
- FOREIGN KEY (repository_id) REFERENCES repository (repository_id) ON DELETE CASCADE,
- CONSTRAINT parent_bsc_id
- FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE,
- CONSTRAINT parent_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- UNIQUE (self_id, parent_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE ca (
- ca_id SERIAL NOT NULL,
- last_crl_sn BIGINT UNSIGNED NOT NULL,
- last_manifest_sn BIGINT UNSIGNED NOT NULL,
- next_manifest_update DATETIME,
- next_crl_update DATETIME,
- last_issued_sn BIGINT UNSIGNED NOT NULL,
- sia_uri TEXT,
- parent_resource_class TEXT,
- parent_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (ca_id),
- CONSTRAINT ca_parent_id
- FOREIGN KEY (parent_id) REFERENCES parent (parent_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE ca_detail (
- ca_detail_id SERIAL NOT NULL,
- public_key LONGBLOB,
- private_key_id LONGBLOB,
- latest_crl LONGBLOB,
- crl_published DATETIME,
- latest_ca_cert LONGBLOB,
- manifest_private_key_id LONGBLOB,
- manifest_public_key LONGBLOB,
- latest_manifest_cert LONGBLOB,
- latest_manifest LONGBLOB,
- manifest_published DATETIME,
- state ENUM ('pending', 'active', 'deprecated', 'revoked') NOT NULL,
- ca_cert_uri TEXT,
- ca_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (ca_detail_id),
- CONSTRAINT ca_detail_ca_id
- FOREIGN KEY (ca_id) REFERENCES ca (ca_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE child (
- child_id SERIAL NOT NULL,
- child_handle VARCHAR(255) NOT NULL,
- bpki_cert LONGBLOB,
- bpki_glue LONGBLOB,
- last_cms_timestamp DATETIME,
- self_id BIGINT UNSIGNED NOT NULL,
- bsc_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (child_id),
- CONSTRAINT child_bsc_id
- FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE,
- CONSTRAINT child_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- UNIQUE (self_id, child_handle)
-) ENGINE=InnoDB;
-
-CREATE TABLE child_cert (
- child_cert_id SERIAL NOT NULL,
- cert LONGBLOB NOT NULL,
- published DATETIME,
- ski TINYBLOB NOT NULL,
- child_id BIGINT UNSIGNED NOT NULL,
- ca_detail_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (child_cert_id),
- CONSTRAINT child_cert_ca_detail_id
- FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE,
- CONSTRAINT child_cert_child_id
- FOREIGN KEY (child_id) REFERENCES child (child_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE revoked_cert (
- revoked_cert_id SERIAL NOT NULL,
- serial BIGINT UNSIGNED NOT NULL,
- revoked DATETIME NOT NULL,
- expires DATETIME NOT NULL,
- ca_detail_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (revoked_cert_id),
- CONSTRAINT revoked_cert_ca_detail_id
- FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE roa (
- roa_id SERIAL NOT NULL,
- asn BIGINT UNSIGNED NOT NULL,
- cert LONGBLOB NOT NULL,
- roa LONGBLOB NOT NULL,
- published DATETIME,
- self_id BIGINT UNSIGNED NOT NULL,
- ca_detail_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (roa_id),
- CONSTRAINT roa_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- CONSTRAINT roa_ca_detail_id
- FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE roa_prefix (
- prefix VARCHAR(40) NOT NULL,
- prefixlen TINYINT UNSIGNED NOT NULL,
- max_prefixlen TINYINT UNSIGNED NOT NULL,
- version TINYINT UNSIGNED NOT NULL,
- roa_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (roa_id, prefix, prefixlen, max_prefixlen),
- CONSTRAINT roa_prefix_roa_id
- FOREIGN KEY (roa_id) REFERENCES roa (roa_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE ghostbuster (
- ghostbuster_id SERIAL NOT NULL,
- vcard LONGBLOB NOT NULL,
- cert LONGBLOB NOT NULL,
- ghostbuster LONGBLOB NOT NULL,
- published DATETIME,
- self_id BIGINT UNSIGNED NOT NULL,
- ca_detail_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (ghostbuster_id),
- CONSTRAINT ghostbuster_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- CONSTRAINT ghostbuster_ca_detail_id
- FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
-CREATE TABLE ee_cert (
- ee_cert_id SERIAL NOT NULL,
- ski BINARY(20) NOT NULL,
- cert LONGBLOB NOT NULL,
- published DATETIME,
- self_id BIGINT UNSIGNED NOT NULL,
- ca_detail_id BIGINT UNSIGNED NOT NULL,
- PRIMARY KEY (ee_cert_id),
- CONSTRAINT ee_cert_self_id
- FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE,
- CONSTRAINT ee_cert_ca_detail_id
- FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-
--- Local Variables:
--- indent-tabs-mode: nil
--- End:
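The same pattern applies to the rpkid schema deleted above: the FOREIGN KEY ... ON DELETE CASCADE chains and the ENUM state column translate directly into ORM declarations, which also removes the need for the carefully ordered DROP TABLE list at the top of the file. A hedged sketch of two of those tables as Django models, with names and field subsets chosen for illustration rather than taken from the tree:

    # Hypothetical ORM counterparts of the dropped "ca" and "ca_detail" tables.
    from django.db import models

    class CA(models.Model):
        last_crl_sn      = models.BigIntegerField()          # was BIGINT UNSIGNED
        last_manifest_sn = models.BigIntegerField()
        last_issued_sn   = models.BigIntegerField()
        sia_uri          = models.TextField(null=True)

    class CADetail(models.Model):
        STATES = ("pending", "active", "deprecated", "revoked")
        state       = models.CharField(max_length=10,
                                       choices=[(s, s) for s in STATES])   # was ENUM
        ca_cert_uri = models.TextField(null=True)
        # ON DELETE CASCADE becomes on_delete=models.CASCADE, so reverse-dependency
        # DROP ordering is handled by the migration framework rather than by hand.
        ca = models.ForeignKey(CA, on_delete=models.CASCADE)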