path: root/rpkid/rpki/sql.py
Diffstat (limited to 'rpkid/rpki/sql.py')
-rw-r--r--  rpkid/rpki/sql.py  801
1 files changed, 801 insertions, 0 deletions
diff --git a/rpkid/rpki/sql.py b/rpkid/rpki/sql.py
new file mode 100644
index 00000000..022e4dd5
--- /dev/null
+++ b/rpkid/rpki/sql.py
@@ -0,0 +1,801 @@
+# $Id$
+
+# Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+import MySQLdb, time
+import rpki.x509, rpki.resource_set, rpki.sundial, rpki.exceptions, rpki.log
+
+def connect(cfg):
+ """Connect to a MySQL database using connection parameters from an
+ rpki.config.parser object.
+ """
+ return MySQLdb.connect(user = cfg.get("sql-username"),
+ db = cfg.get("sql-database"),
+ passwd = cfg.get("sql-password"))
+
+class template(object):
+ """SQL template generator."""
+ def __init__(self, table_name, index_column, *data_columns):
+ """Build a SQL template."""
+ type_map = dict((x[0],x[1]) for x in data_columns if isinstance(x, tuple))
+ data_columns = tuple(isinstance(x, tuple) and x[0] or x for x in data_columns)
+ columns = (index_column,) + data_columns
+ self.table = table_name
+ self.index = index_column
+ self.columns = columns
+ self.map = type_map
+ self.select = "SELECT %s FROM %s" % (", ".join(columns), table_name)
+ self.insert = "INSERT %s (%s) VALUES (%s)" % (table_name, ", ".join(data_columns),
+ ", ".join("%(" + s + ")s" for s in data_columns))
+ self.update = "UPDATE %s SET %s WHERE %s = %%(%s)s" % \
+ (table_name, ", ".join(s + " = %(" + s + ")s" for s in data_columns),
+ index_column, index_column)
+ self.delete = "DELETE FROM %s WHERE %s = %%s" % (table_name, index_column)
+
+## @var sql_cache
+# Cache of objects pulled from SQL.
+
+sql_cache = {}
+
+## @var sql_dirty
+# Set of objects that need to be written back to SQL.
+
+sql_dirty = set()
+
+def sql_cache_clear():
+ """Clear the object cache."""
+ sql_cache.clear()
+
+def sql_assert_pristine():
+ """Assert that there are no dirty objects in the cache."""
+ assert not sql_dirty, "Dirty objects in SQL cache: %s" % sql_dirty
+
+def sql_sweep(gctx):
+ """Write any dirty objects out to SQL."""
+ for s in sql_dirty.copy():
+ rpki.log.debug("Sweeping %s" % repr(s))
+ s.sql_store(gctx)
+ sql_assert_pristine()
+
+class sql_persistant(object):
+  """Mixin for a persistent class that needs to be stored in SQL.
+ """
+
+ ## @var sql_in_db
+ # Whether this object is already in SQL or not.
+ sql_in_db = False
+
+ @classmethod
+ def sql_fetch(cls, gctx, id):
+ """Fetch one object from SQL, based on its primary key. Since in
+ this one case we know that the primary index is also the cache
+ key, we check for a cache hit directly in the hope of bypassing the
+ SQL lookup entirely.
+ """
+ key = (cls, id)
+ if key in sql_cache:
+ return sql_cache[key]
+ else:
+      return cls.sql_fetch_where1(gctx, "%s = %%s" % cls.sql_template.index, (id,))
+
+ @classmethod
+ def sql_fetch_where1(cls, gctx, where, args = None):
+ """Fetch one object from SQL, based on an arbitrary SQL WHERE expression."""
+ results = cls.sql_fetch_where(gctx, where, args)
+ if len(results) == 0:
+ return None
+ elif len(results) == 1:
+ return results[0]
+ else:
+ raise rpki.exceptions.DBConsistancyError, \
+ "Database contained multiple matches for %s where %s" % \
+ (cls.__name__, where % tuple(repr(a) for a in args))
+
+ @classmethod
+ def sql_fetch_all(cls, gctx):
+ """Fetch all objects of this type from SQL."""
+ return cls.sql_fetch_where(gctx, None)
+
+ @classmethod
+ def sql_fetch_where(cls, gctx, where, args = None):
+ """Fetch objects of this type matching an arbitrary SQL WHERE expression."""
+ if where is None:
+ gctx.cur.execute(cls.sql_template.select)
+ else:
+ gctx.cur.execute(cls.sql_template.select + " WHERE " + where, args)
+ results = []
+ for row in gctx.cur.fetchall():
+ key = (cls, row[0])
+ if key in sql_cache:
+ results.append(sql_cache[key])
+ else:
+ results.append(cls.sql_init(gctx, row, key))
+ return results
+
+ @classmethod
+ def sql_init(cls, gctx, row, key):
+ """Initialize one Python object from the result of a SQL query."""
+ self = cls()
+ self.sql_decode(dict(zip(cls.sql_template.columns, row)))
+ sql_cache[key] = self
+ self.sql_in_db = True
+ self.sql_fetch_hook(gctx)
+ return self
+
+ def sql_mark_dirty(self):
+ """Mark this object as needing to be written back to SQL."""
+ sql_dirty.add(self)
+
+ def sql_mark_clean(self):
+ """Mark this object as not needing to be written back to SQL."""
+ sql_dirty.discard(self)
+
+ def sql_is_dirty(self):
+ """Query whether this object needs to be written back to SQL."""
+ return self in sql_dirty
+
+ def sql_store(self, gctx):
+ """Store this object to SQL."""
+ if not self.sql_in_db:
+ gctx.cur.execute(self.sql_template.insert, self.sql_encode())
+ setattr(self, self.sql_template.index, gctx.cur.lastrowid)
+ sql_cache[(self.__class__, gctx.cur.lastrowid)] = self
+ self.sql_insert_hook(gctx)
+ else:
+ gctx.cur.execute(self.sql_template.update, self.sql_encode())
+ self.sql_update_hook(gctx)
+ key = (self.__class__, getattr(self, self.sql_template.index))
+ assert key in sql_cache and sql_cache[key] == self
+ self.sql_mark_clean()
+ self.sql_in_db = True
+
+ def sql_delete(self, gctx):
+ """Delete this object from SQL."""
+ if self.sql_in_db:
+ id = getattr(self, self.sql_template.index)
+ gctx.cur.execute(self.sql_template.delete, id)
+ self.sql_delete_hook(gctx)
+ key = (self.__class__, id)
+ if sql_cache.get(key) == self:
+ del sql_cache[key]
+ self.sql_in_db = False
+ self.sql_mark_clean()
+
+ def sql_encode(self):
+ """Convert object attributes into a dict for use with canned SQL
+ queries. This is a default version that assumes a one-to-one
+ mapping between column names in SQL and attribute names in Python.
+ If you need something fancier, override this.
+ """
+ d = dict((a, getattr(self, a, None)) for a in self.sql_template.columns)
+ for i in self.sql_template.map:
+ if d.get(i) is not None:
+ d[i] = self.sql_template.map[i].to_sql(d[i])
+ return d
+
+ def sql_decode(self, vals):
+ """Initialize an object with values returned by self.sql_fetch().
+ This is a default version that assumes a one-to-one mapping
+ between column names in SQL and attribute names in Python. If you
+ need something fancier, override this.
+ """
+ for a in self.sql_template.columns:
+ if vals.get(a) is not None and a in self.sql_template.map:
+ setattr(self, a, self.sql_template.map[a].from_sql(vals[a]))
+ else:
+ setattr(self, a, vals[a])
+
+ def sql_fetch_hook(self, gctx):
+ """Customization hook."""
+ pass
+
+ def sql_insert_hook(self, gctx):
+ """Customization hook."""
+ pass
+
+ def sql_update_hook(self, gctx):
+ """Customization hook."""
+ self.sql_delete_hook(gctx)
+ self.sql_insert_hook(gctx)
+
+ def sql_delete_hook(self, gctx):
+ """Customization hook."""
+ pass
+
+# Some persistent objects are defined in rpki.left_right, since
+# they're also left-right PDUs. The rest are defined below, for now.
+
+class ca_obj(sql_persistant):
+ """Internal CA object."""
+
+ sql_template = template(
+ "ca", "ca_id", "last_crl_sn",
+ ("next_crl_update", rpki.sundial.datetime),
+ "last_issued_sn", "last_manifest_sn",
+ ("next_manifest_update", rpki.sundial.datetime),
+ "sia_uri", "parent_id", "parent_resource_class")
+
+ last_crl_sn = 0
+ last_issued_sn = 0
+ last_manifest_sn = 0
+
+ def parent(self, gctx):
+ """Fetch parent object to which this CA object links."""
+ return rpki.left_right.parent_elt.sql_fetch(gctx, self.parent_id)
+
+ def ca_details(self, gctx):
+ """Fetch all ca_detail objects that link to this CA object."""
+ return ca_detail_obj.sql_fetch_where(gctx, "ca_id = %s", (self.ca_id,))
+
+ def fetch_pending(self, gctx):
+ """Fetch the pending ca_details for this CA, if any."""
+ return ca_detail_obj.sql_fetch_where(gctx, "ca_id = %s AND state = 'pending'", (self.ca_id,))
+
+ def fetch_active(self, gctx):
+ """Fetch the active ca_detail for this CA, if any."""
+ return ca_detail_obj.sql_fetch_where1(gctx, "ca_id = %s AND state = 'active'", (self.ca_id,))
+
+ def fetch_deprecated(self, gctx):
+ """Fetch deprecated ca_details for this CA, if any."""
+ return ca_detail_obj.sql_fetch_where(gctx, "ca_id = %s AND state = 'deprecated'", (self.ca_id,))
+
+ def fetch_revoked(self, gctx):
+ """Fetch revoked ca_details for this CA, if any."""
+ return ca_detail_obj.sql_fetch_where(gctx, "ca_id = %s AND state = 'revoked'", (self.ca_id,))
+
+ def construct_sia_uri(self, gctx, parent, rc):
+ """Construct the sia_uri value for this CA given configured
+ information and the parent's up-down protocol list_response PDU.
+ """
+
+ repository = parent.repository(gctx)
+ sia_uri = rc.suggested_sia_head and rc.suggested_sia_head.rsync()
+ if not sia_uri or not sia_uri.startswith(parent.sia_base):
+ sia_uri = parent.sia_base
+ elif not sia_uri.endswith("/"):
+ raise rpki.exceptions.BadURISyntax, "SIA URI must end with a slash: %s" % sia_uri
+ return sia_uri + str(self.ca_id) + "/"
+
+ def check_for_updates(self, gctx, parent, rc):
+    """Parent has signaled continued existence of a resource class we
+ already knew about, so we need to check for an updated
+ certificate, changes in resource coverage, revocation and reissue
+ with the same key, etc.
+ """
+
+ sia_uri = self.construct_sia_uri(gctx, parent, rc)
+ sia_uri_changed = self.sia_uri != sia_uri
+ if sia_uri_changed:
+ self.sia_uri = sia_uri
+ self.sql_mark_dirty()
+
+ rc_resources = rc.to_resource_bag()
+ cert_map = dict((c.cert.get_SKI(), c) for c in rc.certs)
+
+ for ca_detail in ca_detail_obj.sql_fetch_where(gctx, "ca_id = %s AND latest_ca_cert IS NOT NULL AND state != 'revoked'", (self.ca_id,)):
+ ski = ca_detail.latest_ca_cert.get_SKI()
+ if ca_detail.state in ("pending", "active"):
+ current_resources = ca_detail.latest_ca_cert.get_3779resources()
+ if sia_uri_changed or \
+ ca_detail.latest_ca_cert != cert_map[ski].cert or \
+ current_resources.undersized(rc_resources) or \
+ current_resources.oversized(rc_resources):
+ ca_detail.update(
+ gctx = gctx,
+ parent = parent,
+ ca = self,
+ rc = rc,
+ sia_uri_changed = sia_uri_changed,
+ old_resources = current_resources)
+ del cert_map[ski]
+ assert not cert_map, "Certificates in list_response missing from our database, SKIs %s" % ", ".join(c.cert.hSKI() for c in cert_map.values())
+
+ @classmethod
+ def create(cls, gctx, parent, rc):
+    """Parent has signaled existence of a new resource class, so we
+ need to create and set up a corresponding CA object.
+ """
+
+ self = cls()
+ self.parent_id = parent.parent_id
+ self.parent_resource_class = rc.class_name
+ self.sql_store(gctx)
+ self.sia_uri = self.construct_sia_uri(gctx, parent, rc)
+ ca_detail = ca_detail_obj.create(gctx, self)
+
+ # This will need a callback when we go event-driven
+ issue_response = rpki.up_down.issue_pdu.query(gctx, parent, self, ca_detail)
+
+ ca_detail.activate(
+ gctx = gctx,
+ ca = self,
+ cert = issue_response.payload.classes[0].certs[0].cert,
+ uri = issue_response.payload.classes[0].certs[0].cert_url)
+
+ def delete(self, gctx, parent):
+ """The list of current resource classes received from parent does
+ not include the class corresponding to this CA, so we need to
+ delete it (and its little dog too...).
+
+ All certs published by this CA are now invalid, so need to
+ withdraw them, the CRL, and the manifest from the repository,
+ delete all child_cert and ca_detail records associated with this
+ CA, then finally delete this CA itself.
+ """
+
+ repository = parent.repository(gctx)
+ for ca_detail in self.ca_details(gctx):
+      ca_detail.delete(gctx, self, repository)
+ self.sql_delete(gctx)
+
+ def next_serial_number(self):
+ """Allocate a certificate serial number."""
+ self.last_issued_sn += 1
+ self.sql_mark_dirty()
+ return self.last_issued_sn
+
+ def next_manifest_number(self):
+ """Allocate a manifest serial number."""
+ self.last_manifest_sn += 1
+ self.sql_mark_dirty()
+ return self.last_manifest_sn
+
+ def next_crl_number(self):
+ """Allocate a CRL serial number."""
+ self.last_crl_sn += 1
+ self.sql_mark_dirty()
+ return self.last_crl_sn
+
+ def rekey(self, gctx):
+ """Initiate a rekey operation for this ca.
+
+ Tasks:
+
+ - Generate a new keypair.
+
+ - Request cert from parent using new keypair.
+
+ - Mark result as our active ca_detail.
+
+ - Reissue all child certs issued by this ca using the new ca_detail.
+ """
+
+ rpki.log.trace()
+
+ parent = self.parent(gctx)
+ old_detail = self.fetch_active(gctx)
+ new_detail = ca_detail_obj.create(gctx, self)
+
+ # This will need a callback when we go event-driven
+ issue_response = rpki.up_down.issue_pdu.query(gctx, parent, self, new_detail)
+
+ new_detail.activate(
+ gctx = gctx,
+ ca = self,
+ cert = issue_response.payload.classes[0].certs[0].cert,
+ uri = issue_response.payload.classes[0].certs[0].cert_url,
+ predecessor = old_detail)
+
+ def revoke(self, gctx):
+ """Revoke deprecated ca_detail objects associated with this ca."""
+
+ rpki.log.trace()
+
+ for ca_detail in self.fetch_deprecated(gctx):
+ ca_detail.revoke(gctx)
+
+class ca_detail_obj(sql_persistant):
+ """Internal CA detail object."""
+
+ sql_template = template(
+ "ca_detail",
+ "ca_detail_id",
+ ("private_key_id", rpki.x509.RSA),
+ ("public_key", rpki.x509.RSApublic),
+ ("latest_ca_cert", rpki.x509.X509),
+ ("manifest_private_key_id", rpki.x509.RSA),
+ ("manifest_public_key", rpki.x509.RSApublic),
+ ("latest_manifest_cert", rpki.x509.X509),
+ ("latest_manifest", rpki.x509.SignedManifest),
+ ("latest_crl", rpki.x509.CRL),
+ "state",
+ "ca_cert_uri",
+ "ca_id")
+
+ def sql_decode(self, vals):
+ """Extra assertions for SQL decode of a ca_detail_obj."""
+ sql_persistant.sql_decode(self, vals)
+ assert (self.public_key is None and self.private_key_id is None) or \
+ self.public_key.get_DER() == self.private_key_id.get_public_DER()
+ assert (self.manifest_public_key is None and self.manifest_private_key_id is None) or \
+ self.manifest_public_key.get_DER() == self.manifest_private_key_id.get_public_DER()
+
+ def ca(self, gctx):
+ """Fetch CA object to which this ca_detail links."""
+ return ca_obj.sql_fetch(gctx, self.ca_id)
+
+ def child_certs(self, gctx, child = None, ski = None, revoked = False, unique = False):
+ """Fetch all child_cert objects that link to this ca_detail."""
+ return rpki.sql.child_cert_obj.fetch(gctx, child, self, ski, revoked, unique)
+
+ def route_origins(self, gctx):
+ """Fetch all route_origin objects that link to this ca_detail."""
+ return rpki.left_right.route_origin_elt.sql_fetch_where(gctx, "ca_detail_id = %s", (self.ca_detail_id,))
+
+ def crl_uri(self, ca):
+ """Return publication URI for this ca_detail's CRL."""
+ return ca.sia_uri + self.public_key.gSKI() + ".crl"
+
+ def manifest_uri(self, ca):
+ """Return publication URI for this ca_detail's manifest."""
+ return ca.sia_uri + self.public_key.gSKI() + ".mnf"
+
+ def activate(self, gctx, ca, cert, uri, predecessor = None):
+ """Activate this ca_detail."""
+
+ self.latest_ca_cert = cert
+ self.ca_cert_uri = uri.rsync()
+ self.generate_manifest_cert(ca)
+ self.generate_crl(gctx)
+ self.generate_manifest(gctx)
+ self.state = "active"
+ self.sql_mark_dirty()
+
+ if predecessor is not None:
+ predecessor.state = "deprecated"
+ predecessor.sql_mark_dirty()
+ for child_cert in predecessor.child_certs(gctx):
+ child_cert.reissue(gctx, self)
+
+ def delete(self, gctx, ca, repository):
+ """Delete this ca_detail and all of its associated child_cert objects."""
+
+ for child_cert in self.child_certs(gctx):
+ repository.withdraw(gctx, child_cert.cert, child_cert.uri(ca))
+ child_cert.sql_delete(gctx)
+ for child_cert in self.child_certs(gctx, revoked = True):
+ child_cert.sql_delete(gctx)
+ repository.withdraw(gctx, self.latest_manifest, self.manifest_uri(ca))
+    repository.withdraw(gctx, self.latest_crl, self.crl_uri(ca))
+ self.sql_delete(gctx)
+
+ def revoke(self, gctx):
+ """Request revocation of all certificates whose SKI matches the key for this ca_detail.
+
+ Tasks:
+
+ - Request revocation of old keypair by parent.
+
+ - Revoke all child certs issued by the old keypair.
+
+ - Generate a final CRL, signed with the old keypair, listing all
+ the revoked certs, with a next CRL time after the last cert or
+ CRL signed by the old keypair will have expired.
+
+ - Destroy old keypair (and manifest keypair).
+
+ - Leave final CRL in place until its next CRL time has passed.
+ """
+
+ # This will need a callback when we go event-driven
+ r_msg = rpki.up_down.revoke_pdu.query(gctx, self)
+
+ if r_msg.payload.ski != self.latest_ca_cert.gSKI():
+ raise rpki.exceptions.SKIMismatch
+
+ ca = self.ca(gctx)
+ parent = ca.parent(gctx)
+ crl_interval = rpki.sundial.timedelta(seconds = parent.self(gctx).crl_interval)
+
+ nextUpdate = rpki.sundial.datetime.utcnow()
+
+ if self.latest_manifest is not None:
+ nextUpdate = nextUpdate.later(self.latest_manifest.getNextUpdate())
+
+ if self.latest_crl is not None:
+ nextUpdate = nextUpdate.later(self.latest_crl.getNextUpdate())
+
+ for child_cert in self.child_certs(gctx):
+ nextUpdate = nextUpdate.later(child_cert.cert.getNotAfter())
+ child_cert.revoke(gctx)
+
+ nextUpdate += crl_interval
+
+ self.generate_crl(gctx, nextUpdate)
+ self.generate_manifest(gctx, nextUpdate)
+
+ self.private_key_id = None
+ self.manifest_private_key_id = None
+ self.manifest_public_key = None
+ self.latest_manifest_cert = None
+ self.state = "revoked"
+ self.sql_mark_dirty()
+
+ def update(self, gctx, parent, ca, rc, sia_uri_changed, old_resources):
+ """Need to get a new certificate for this ca_detail and perhaps
+ frob children of this ca_detail.
+ """
+
+ # This will need a callback when we go event-driven
+ issue_response = rpki.up_down.issue_pdu.query(gctx, parent, ca, self)
+
+ self.latest_ca_cert = issue_response.payload.classes[0].certs[0].cert
+ new_resources = self.latest_ca_cert.get_3779resources()
+
+ if sia_uri_changed or old_resources.oversized(new_resources):
+ for child_cert in self.child_certs(gctx):
+ child_resources = child_cert.cert.get_3779resources()
+ if sia_uri_changed or child_resources.oversized(new_resources):
+ child_cert.reissue(
+ gctx = gctx,
+ ca_detail = self,
+ resources = child_resources.intersection(new_resources))
+
+ @classmethod
+ def create(cls, gctx, ca):
+ """Create a new ca_detail object for a specified CA."""
+ self = cls()
+ self.ca_id = ca.ca_id
+ self.state = "pending"
+
+ self.private_key_id = rpki.x509.RSA()
+ self.private_key_id.generate()
+ self.public_key = self.private_key_id.get_RSApublic()
+
+ self.manifest_private_key_id = rpki.x509.RSA()
+ self.manifest_private_key_id.generate()
+ self.manifest_public_key = self.manifest_private_key_id.get_RSApublic()
+
+ self.sql_store(gctx)
+ return self
+
+ def generate_manifest_cert(self, ca):
+ """Generate a new manifest certificate for this ca_detail."""
+
+ resources = rpki.resource_set.resource_bag(
+ as = rpki.resource_set.resource_set_as("<inherit>"),
+ v4 = rpki.resource_set.resource_set_ipv4("<inherit>"),
+ v6 = rpki.resource_set.resource_set_ipv6("<inherit>"))
+
+ self.latest_manifest_cert = self.latest_ca_cert.issue(
+ keypair = self.private_key_id,
+ subject_key = self.manifest_public_key,
+ serial = ca.next_manifest_number(),
+ sia = None,
+ aia = self.ca_cert_uri,
+ crldp = self.crl_uri(ca),
+ resources = resources,
+ notAfter = self.latest_ca_cert.getNotAfter(),
+ is_ca = False)
+
+ def issue(self, gctx, ca, child, subject_key, sia, resources, child_cert = None):
+ """Issue a new certificate to a child. Optional child_cert
+ argument specifies an existing child_cert object to update in
+ place; if not specified, we create a new one. Returns the
+ child_cert object containing the newly issued cert.
+ """
+
+ assert child_cert is None or (child_cert.child_id == child.child_id and
+ child_cert.ca_detail_id == self.ca_detail_id)
+
+ cert = self.latest_ca_cert.issue(
+ keypair = self.private_key_id,
+ subject_key = subject_key,
+ serial = ca.next_serial_number(),
+ aia = self.ca_cert_uri,
+ crldp = self.crl_uri(ca),
+ sia = sia,
+ resources = resources,
+ notAfter = resources.valid_until)
+
+ if child_cert is None:
+ child_cert = rpki.sql.child_cert_obj(
+ child_id = child.child_id,
+ ca_detail_id = self.ca_detail_id,
+ cert = cert)
+ rpki.log.debug("Created new child_cert %s" % repr(child_cert))
+ else:
+ child_cert.cert = cert
+ rpki.log.debug("Reusing existing child_cert %s" % repr(child_cert))
+
+ child_cert.ski = cert.get_SKI()
+
+ child_cert.sql_store(gctx)
+
+ ca.parent(gctx).repository(gctx).publish(gctx, child_cert.cert, child_cert.uri(ca))
+
+ self.generate_manifest(gctx)
+
+ return child_cert
+
+ def generate_crl(self, gctx, nextUpdate = None):
+ """Generate a new CRL for this ca_detail. At the moment this is
+ unconditional, that is, it is up to the caller to decide whether a
+ new CRL is needed.
+ """
+
+ ca = self.ca(gctx)
+ parent = ca.parent(gctx)
+ repository = parent.repository(gctx)
+ crl_interval = rpki.sundial.timedelta(seconds = parent.self(gctx).crl_interval)
+ now = rpki.sundial.datetime.utcnow()
+
+ if nextUpdate is None:
+ nextUpdate = now + crl_interval
+
+ certlist = []
+ for child_cert in self.child_certs(gctx, revoked = True):
+ if now > child_cert.cert.getNotAfter() + crl_interval:
+        child_cert.sql_delete(gctx)
+ else:
+ certlist.append((child_cert.cert.getSerial(), child_cert.revoked.toASN1tuple(), ()))
+ certlist.sort()
+
+ self.latest_crl = rpki.x509.CRL.generate(
+ keypair = self.private_key_id,
+ issuer = self.latest_ca_cert,
+ serial = ca.next_crl_number(),
+ thisUpdate = now,
+ nextUpdate = nextUpdate,
+ revokedCertificates = certlist)
+
+ repository.publish(gctx, self.latest_crl, self.crl_uri(ca))
+
+ def generate_manifest(self, gctx, nextUpdate = None):
+ """Generate a new manifest for this ca_detail."""
+
+ ca = self.ca(gctx)
+ parent = ca.parent(gctx)
+ repository = parent.repository(gctx)
+ crl_interval = rpki.sundial.timedelta(seconds = parent.self(gctx).crl_interval)
+ now = rpki.sundial.datetime.utcnow()
+
+ if nextUpdate is None:
+ nextUpdate = now + crl_interval
+
+ certs = self.child_certs(gctx)
+
+ m = rpki.x509.SignedManifest()
+ m.build(
+ serial = ca.next_manifest_number(),
+ thisUpdate = now,
+ nextUpdate = nextUpdate,
+ names_and_objs = [(c.uri_tail(), c.cert) for c in certs],
+ keypair = self.manifest_private_key_id,
+ certs = rpki.x509.X509_chain(self.latest_manifest_cert))
+ self.latest_manifest = m
+
+ repository.publish(gctx, self.latest_manifest, self.manifest_uri(ca))
+
+class child_cert_obj(sql_persistant):
+ """Certificate that has been issued to a child."""
+
+ sql_template = template("child_cert", "child_cert_id", ("cert", rpki.x509.X509), "child_id", "ca_detail_id", "ski", ("revoked", rpki.sundial.datetime))
+
+ def __init__(self, child_id = None, ca_detail_id = None, cert = None):
+ """Initialize a child_cert_obj."""
+ self.child_id = child_id
+ self.ca_detail_id = ca_detail_id
+ self.cert = cert
+ self.revoked = None
+ if child_id or ca_detail_id or cert:
+ self.sql_mark_dirty()
+
+ def child(self, gctx):
+ """Fetch child object to which this child_cert object links."""
+ return rpki.left_right.child_elt.sql_fetch(gctx, self.child_id)
+
+ def ca_detail(self, gctx):
+ """Fetch ca_detail object to which this child_cert object links."""
+ return ca_detail_obj.sql_fetch(gctx, self.ca_detail_id)
+
+ def uri_tail(self):
+ """Return the tail (filename) portion of the URI for this child_cert."""
+ return self.cert.gSKI() + ".cer"
+
+ def uri(self, ca):
+ """Return the publication URI for this child_cert."""
+ return ca.sia_uri + self.uri_tail()
+
+ def revoke(self, gctx):
+ """Mark a child cert as revoked."""
+ if self.revoked is None:
+ rpki.log.debug("Revoking %s" % repr(self))
+ self.revoked = rpki.sundial.datetime.utcnow()
+ ca = self.ca_detail(gctx).ca(gctx)
+ repository = ca.parent(gctx).repository(gctx)
+ repository.withdraw(gctx, self.cert, self.uri(ca))
+ self.sql_mark_dirty()
+
+ def reissue(self, gctx, ca_detail, resources = None, sia = None):
+ """Reissue an existing cert, reusing the public key. If the cert
+ we would generate is identical to the one we already have, we just
+ return the one we already have. If we have to revoke the old
+ certificate when generating the new one, we have to generate a new
+ child_cert_obj, so calling code that needs the updated
+ child_cert_obj must use the return value from this method.
+ """
+
+ ca = ca_detail.ca(gctx)
+ child = self.child(gctx)
+
+ old_resources = self.cert.get_3779resources()
+ old_sia = self.cert.get_SIA()
+ old_ca_detail = self.ca_detail(gctx)
+
+ if resources is None:
+ resources = old_resources
+
+ if sia is None:
+ sia = old_sia
+
+ assert resources.valid_until is not None and old_resources.valid_until is not None
+
+ if resources == old_resources and sia == old_sia and ca_detail == old_ca_detail:
+ return self
+
+ must_revoke = old_resources.oversized(resources) or old_resources.valid_until > resources.valid_until
+ new_issuer = ca_detail != old_ca_detail
+
+ if resources.valid_until != old_resources.valid_until:
+ rpki.log.debug("Validity changed: %s %s" % ( old_resources.valid_until, resources.valid_until))
+
+ if must_revoke or new_issuer:
+ child_cert = None
+ else:
+ child_cert = self
+
+ child_cert = ca_detail.issue(
+ gctx = gctx,
+ ca = ca,
+ child = child,
+ subject_key = self.cert.getPublicKey(),
+ sia = sia,
+ resources = resources,
+ child_cert = child_cert)
+
+ if must_revoke:
+ for cert in child.child_certs(gctx = gctx, ca_detail = ca_detail, ski = self.ski):
+ if cert is not child_cert:
+ cert.revoke(gctx)
+
+ return child_cert
+
+ @classmethod
+ def fetch(cls, gctx, child = None, ca_detail = None, ski = None, revoked = False, unique = False):
+ """Fetch all child_cert objects matching a particular set of
+ parameters. This is a wrapper to consolidate various queries that
+ would otherwise be inline SQL WHERE expressions. In most cases
+ code calls this indirectly, through methods in other classes.
+ """
+
+ args = []
+ where = "revoked IS"
+ if revoked:
+ where += " NOT"
+ where += " NULL"
+ if child:
+ where += " AND child_id = %s"
+ args.append(child.child_id)
+ if ca_detail:
+ where += " AND ca_detail_id = %s"
+ args.append(ca_detail.ca_detail_id)
+ if ski:
+ where += " AND ski = %s"
+ args.append(ski)
+ if unique:
+ return cls.sql_fetch_where1(gctx, where, args)
+ else:
+ return cls.sql_fetch_where(gctx, where, args)
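
The machinery above is driven through a "gctx" object; the only thing this
module assumes about it is that gctx.cur is a live MySQLdb cursor.  A minimal
usage sketch (not part of the commit itself; the database credentials, the
gctx stand-in, and the example ca_id are placeholders) might look like this:

    import MySQLdb
    import rpki.sql

    class gctx_stub(object):
      """Minimal stand-in for the daemon's global context: rpki.sql only
      touches gctx.cur, so that is all we provide here."""
      pass

    gctx = gctx_stub()
    gctx.db = MySQLdb.connect(user = "rpkid", db = "rpkid", passwd = "placeholder")
    gctx.cur = gctx.db.cursor()

    ca = rpki.sql.ca_obj.sql_fetch(gctx, 1)     # primary-key fetch, satisfied from cache when possible
    if ca is not None:
      ca.next_serial_number()                   # mutators mark the object dirty themselves
    rpki.sql.sql_sweep(gctx)                    # write every dirty object back to SQL
    gctx.db.commit()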

path: root/rpki/config.py

# $Id$
#
# Copyright (C) 2015-2016  Parsons Government Services ("PARSONS")
# Portions copyright (C) 2013-2014  Dragon Research Labs ("DRL")
# Portions copyright (C) 2009-2012  Internet Systems Consortium ("ISC")
# Portions copyright (C) 2007-2008  American Registry for Internet Numbers ("ARIN")
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notices and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND PARSONS, DRL, ISC, AND ARIN
# DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.  IN NO EVENT
# SHALL PARSONS, DRL, ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""
Configuration file parsing utilities, layered on top of stock Python
ConfigParser module.
"""

import ConfigParser
import argparse
import logging
import logging.handlers
import traceback
import time
import sys
import os
import re

logger = logging.getLogger(__name__)

## @var default_filename
# Default name of config file if caller doesn't specify one explicitly.

try:
    import rpki.autoconf
    default_filename = os.path.join(rpki.autoconf.sysconfdir, "rpki.conf")
except ImportError:
    default_filename = None

## @var rpki_conf_envname
# Name of environment variable containing config file name.

rpki_conf_envname = "RPKI_CONF"


class parser(object):
    """
    Extensions to stock Python ConfigParser:

    Read config file and set default section while initializing parser object.

    Support for OpenSSL-style subscripted options and a limited form of
    OpenSSL-style indirect variable references (${section::option}).

    get-methods with default values and default section name.

    If no filename is given to the constructor (filename and
    set_filename both None), we check for an environment variable naming
    the config file, then finally we check for a global config file if
    autoconf provided a directory name to check.

    NB: Programs which accept a configuration filename on the command
    line should pass that filename using set_filename so that we can
    set the magic environment variable.  Constraints from some external
    libraries (principally Django) sometimes require library code to
    look things up in the configuration file without the knowledge of
    the controlling program, but setting the environment variable
    ensures that everybody's reading from the same script, as it were.
    """

    # Odd keyword-only calling sequence is a defense against old code
    # that thinks it knows how __init__() handles positional arguments.

    def __init__(self, **kwargs):
        section       = kwargs.pop("section",       None)
        allow_missing = kwargs.pop("allow_missing", False)
        set_filename  = kwargs.pop("set_filename",  None)
        filename      = kwargs.pop("filename",      set_filename)
        argparser     = kwargs.pop("argparser",     None)

        assert not kwargs, "Unexpected keyword arguments: {}".format(
            ", ".join("{} = {!r}".format(k, v) for k, v in kwargs.iteritems()))

        if set_filename is not None:
            os.environ[rpki_conf_envname] = set_filename

        self.cfg = ConfigParser.RawConfigParser()
        self.default_section = section

        self.filename = filename or os.getenv(rpki_conf_envname) or default_filename
        self.argparser = argparser
        self.logging_defaults = None

        try:
            with open(self.filename, "r") as f:
                self.cfg.readfp(f)
        except IOError:
            if allow_missing:
                self.filename = None
            else:
                raise
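
    # Illustrative sketch, not part of the original module: a program that
    # accepts -c/--config on its command line would typically construct this
    # object as
    #
    #   cfg = rpki.config.parser(set_filename = args.config, section = "rpkid")
    #
    # which also exports the chosen filename via $RPKI_CONF so that library
    # code (eg, Django settings) ends up reading the same configuration file.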


    def has_section(self, section):
        """
        Test whether a section exists.
        """

        return self.cfg.has_section(section)


    def has_option(self, option, section = None):
        """
        Test whether an option exists.
        """

        if section is None:
            section = self.default_section
        return self.cfg.has_option(section, option)


    def multiget(self, option, section = None):
        """
        Parse OpenSSL-style foo.0, foo.1, ... subscripted options.

        Returns an iterator over the values matching the specified option name.
        """

        matches = []
        if section is None:
            section = self.default_section
        if self.cfg.has_option(section, option):
            yield self.cfg.get(section, option)
        option += "."
        matches = [o for o in self.cfg.options(section)
                   if o.startswith(option) and o[len(option):].isdigit()]
        matches.sort()
        for option in matches:
            yield self.cfg.get(section, option)
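
    # Illustrative sketch, not part of the original module: given a config
    # file section containing
    #
    #   [mysection]
    #   foo   = first
    #   foo.0 = second
    #   foo.1 = third
    #
    # multiget("foo", section = "mysection") yields "first", "second", and
    # "third": the unsubscripted option (if present) comes first, followed
    # by the subscripted options in sorted order.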


    _regexp = re.compile("\\${(.*?)::(.*?)}")

    def _repl(self, m):
        """
        Replacement function for indirect variable substitution.
        This is intended for use with re.subn().
        """

        section, option = m.group(1, 2)
        if section == "ENV":
            return os.getenv(option, "")
        else:
            return self.cfg.get(section, option)


    def get(self, option, default = None, section = None):
        """
        Get an option, perhaps with a default value.
        """

        if section is None:
            section = self.default_section
        if default is not None and not self.cfg.has_option(section, option):
            return default
        val = self.cfg.get(section, option)
        while True:
            val, modified = self._regexp.subn(self._repl, val, 1)
            if not modified:
                return val
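
    # Illustrative sketch, not part of the original module: with a config
    # file containing
    #
    #   [paths]
    #   topdir  = /srv/rpki
    #   datadir = ${paths::topdir}/data
    #   homedir = ${ENV::HOME}
    #
    # get("datadir", section = "paths") returns "/srv/rpki/data", while the
    # special ENV "section" pulls the value from the process environment
    # (expanding to the empty string if the variable is unset).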


    def getboolean(self, option, default = None, section = None):
        """
        Get a boolean option, perhaps with a default value.
        """

        # pylint: disable=W0212
        v = self.get(option, default, section)
        if isinstance(v, str):
            v = v.lower()
            if v not in self.cfg._boolean_states:
                raise ValueError("Not boolean: {}".format(v))
            v = self.cfg._boolean_states[v]
        return v


    def getint(self, option, default = None, section = None):
        """
        Get an integer option, perhaps with a default value.
        """

        return int(self.get(option, default, section))


    def getlong(self, option, default = None, section = None):
        """
        Get a long integer option, perhaps with a default value.
        """

        return long(self.get(option, default, section))


    def _get_argument_default(self, names, kwargs):
        section = kwargs.pop("section", None)
        default = kwargs.pop("default", None)

        for name in names:
            if name.startswith("--"):
                name = name[2:]
                break
        else:
            raise ValueError

        if self.has_option(option = name, section = section):
            default = self.get(option = name, section = section, default = default)

            if "type" in kwargs:
                default = kwargs["type"](default)

            if "choices" in kwargs and default not in kwargs["choices"]:
                raise ValueError

        kwargs["default"] = default

        return name, default, kwargs


    def add_argument(self, *names, **kwargs):
        """
        Combined command line and config file argument.  Takes
        arguments mostly like ArgumentParser.add_argument(), but also
        looks in config file for option of the same name.

        The "section" and "default" arguments are used for the config file
        lookup; the resulting value is used as the "default" parameter for
        the argument parser.

        If a "type" argument is specified, it applies to both the value
        parsed from the config file and the argument parser.
        """

        name, default, kwargs = self._get_argument_default(names, kwargs)
        return self.argparser.add_argument(*names, **kwargs)


    def add_boolean_argument(self, name, **kwargs):
        """
        Combined command line and config file boolean argument.  Takes
        arguments mostly like ArgumentParser.add_argument(), but also
        looks in config file for option of the same name.

        The "section" and "default" arguments are used for the config file
        lookup; the resulting value is used as the default value for
        the argument parser.

        Usage is a bit different from the normal ArgumentParser boolean
        handling: because the command line default is controlled by the
        config file, the "store_true" / "store_false" semantics don't
        really work for us.  So, instead, we use the --foo / --no-foo
        convention, and generate a pair of command line arguments with
        those names controlling a single "foo" value in the result.
        """

        section = kwargs.pop("section", None)
        default = kwargs.pop("default", None)
        help    = kwargs.pop("help",    None)

        if not name.startswith("--"):
            raise ValueError
        name = name[2:]

        default = self.getboolean(name, default = default, section = section)

        kwargs["action"] = "store_const"
        kwargs["dest"] = name.replace("-", "_")

        group = self.argparser.add_mutually_exclusive_group()

        kwargs["const"] = True
        group.add_argument("--" + name, **kwargs)

        kwargs["const"] = False
        kwargs["help"] = help
        group.add_argument("--no-" + name, **kwargs)

        self.argparser.set_defaults(**{ kwargs["dest"] : default })
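
    # Illustrative sketch, not part of the original module:
    #
    #   cfg.add_boolean_argument("--use-syslog", default = False,
    #                            help = "whether to log via syslog")
    #
    # creates both --use-syslog and --no-use-syslog command line options,
    # stores the result as args.use_syslog, and takes the default from a
    # "use-syslog" option in the config file when one is present.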


    def _add_logging_argument(self, *names, **kwargs):
        group = kwargs.pop("group", self.argparser)
        name, default, kwargs = self._get_argument_default(names, kwargs)
        setattr(self.logging_defaults, name.replace("-", "_"), default)
        if group is not None:
            group.add_argument(*names, **kwargs)


    def add_logging_arguments(self, section = None):
        """
        Set up standard logging-related arguments.  This can be called
        even when we're not going to parse the command line (eg,
        because we're a WSGI app and therefore don't have a command
        line), to handle whacking arguments from the config file into
        the format that the logging setup code expects to see.
        """

        self.logging_defaults = argparse.Namespace(
            default_log_destination = None)

        if self.argparser is not None:
            self.argparser.set_defaults(
                default_log_destination = None)

        class non_negative_integer(int):
            def __init__(self, value):
                if self < 0:
                    raise ValueError

        class positive_integer(int):
            def __init__(self, value):
                if self <= 0:
                    raise ValueError

        if self.argparser is None:
            limit_group = None
        else:
            limit_group = self.argparser.add_mutually_exclusive_group()

        self._add_logging_argument(
            "--log-level",
            default = "warning",
            choices = ("debug", "info", "warning", "error", "critical"),
            help    = "how verbosely to log")

        self._add_logging_argument(
            "--log-destination",
            choices = ("syslog", "stdout", "stderr", "file"),
            help    = "logging mechanism to use")

        self._add_logging_argument(
            "--log-filename",
            help    = "where to log when log destination is \"file\"")

        self._add_logging_argument(
            "--log-facility",
            default = "daemon",
            choices = sorted(logging.handlers.SysLogHandler.facility_names.keys()),
            help    = "syslog facility to use when log destination is \"syslog\"")

        self._add_logging_argument(
            "--log-count",
            default = "7",
            type    = positive_integer,
            help    = "how many logs to keep when rotating for log destination \"file\"")

        self._add_logging_argument(
            "--log-size-limit",
            group   = limit_group,
            default = 0,
            type    = non_negative_integer,
            help    = "size in kbytes after which to rotate log for destination \"file\"")

        self._add_logging_argument(
            "--log-time-limit",
            group   = limit_group,
            default = 0,
            type    = non_negative_integer,
            help    = "hours after which to rotate log for destination \"file\"")


    def configure_logging(self, args = None, ident = None):
        """
        Configure the logging system, using information from both the
        config file and the command line; if this particular program
        doesn't use the command line (eg, a WSGI app), we just use the
        config file.
        """

        if self.logging_defaults is None:
            self.add_logging_arguments()

        if args is None:
            args = self.logging_defaults

        log_level = getattr(logging, args.log_level.upper())

        log_destination = args.log_destination or args.default_log_destination or "stderr"

        if log_destination == "stderr":
            log_handler = logging.StreamHandler(
                stream = sys.stderr)

        elif log_destination == "stdout":
            log_handler = logging.StreamHandler(
                stream = sys.stdout)

        elif log_destination == "syslog":
            log_handler = logging.handlers.SysLogHandler(
                address = ("/dev/log" if os.path.exists("/dev/log")
                           else ("localhost", logging.handlers.SYSLOG_UDP_PORT)),
                facility = logging.handlers.SysLogHandler.facility_names[args.log_facility])

        elif log_destination == "file" and (args.log_size_limit == 0 and 
                                            args.log_time_limit == 0):
            log_handler = logging.handlers.WatchedFileHandler(
                filename = args.log_filename)

        elif log_destination == "file" and args.log_time_limit == 0:
            log_handler = logging.handlers.RotatingFileHandler(
                filename    = args.log_filename,
                maxBytes    = args.log_size_limit * 1024,
                backupCount = args.log_count)

        elif log_destination == "file" and args.log_size_limit == 0:
            log_handler = logging.handlers.TimedRotatingFileHandler(
                filename    = args.log_filename,
                interval    = args.log_time_limit,
                backupCount = args.log_count,
                when        = "H",
                utc         = True)
            
        else:
            raise ValueError

        if ident is None:
            ident = os.path.basename(sys.argv[0])

        log_handler.setFormatter(Formatter(ident, log_handler, log_level))

        root_logger = logging.getLogger()
        root_logger.addHandler(log_handler)
        root_logger.setLevel(log_level)


    def set_global_flags(self):
        """
        Consolidated control for all the little global control flags
        scattered through the libraries.  This isn't a particularly good
        place for this function to live, but it has to live somewhere and
        making it a method of the config parser from which it gets all of
        its data is less silly than the available alternatives.
        """

        # pylint: disable=W0621
        import rpki.x509
        import rpki.daemonize

        for line in self.multiget("configure_logger"):
            try:
                name, level = line.split()
                logging.getLogger(name).setLevel(getattr(logging, level.upper()))
            except Exception, e:
                logger.warning("Could not process configure_logger line %r: %s", line, e)

        try:
            rpki.x509.CMS_object.debug_cms_certs = self.getboolean("debug_cms_certs")
        except ConfigParser.NoOptionError:
            pass

        try:
            rpki.x509.XML_CMS_object.dump_outbound_cms = rpki.x509.DeadDrop(
                self.get("dump_outbound_cms"))
        except OSError, e:
            logger.warning("Couldn't initialize mailbox %s: %s", self.get("dump_outbound_cms"), e)
        except ConfigParser.NoOptionError:
            pass

        try:
            rpki.x509.XML_CMS_object.dump_inbound_cms = rpki.x509.DeadDrop(
                self.get("dump_inbound_cms"))
        except OSError, e:
            logger.warning("Couldn't initialize mailbox %s: %s", self.get("dump_inbound_cms"), e)
        except ConfigParser.NoOptionError:
            pass

        try:
            rpki.x509.XML_CMS_object.check_inbound_schema = self.getboolean("check_inbound_schema")
        except ConfigParser.NoOptionError:
            pass

        try:
            rpki.x509.XML_CMS_object.check_outbound_schema = self.getboolean("check_outbound_schema")
        except ConfigParser.NoOptionError:
            pass

        try:
            rpki.daemonize.default_pid_directory = self.get("pid_directory")
        except ConfigParser.NoOptionError:
            pass

        try:
            rpki.daemonize.pid_filename = self.get("pid_filename")
        except ConfigParser.NoOptionError:
            pass

        try:
            rpki.x509.generate_insecure_debug_only_rsa_key = rpki.x509.insecure_debug_only_rsa_key_generator(*self.get("insecure-debug-only-rsa-key-db").split())
        except ConfigParser.NoOptionError:
            pass
        except:
            logger.warning("insecure-debug-only-rsa-key-db configured but initialization failed, check for corrupted database file")

        try:
            rpki.up_down.content_type = self.get("up_down_content_type")
        except ConfigParser.NoOptionError:
            pass


def argparser(section = None, doc = None, cfg_optional = False):
    """
    First cut at a combined configuration mechanism based on ConfigParser and argparse.

    General idea here is to do an initial pass on the arguments to handle the config file,
    then return the config file and a parser to use for the rest of the arguments.
    """

    # Basic approach here is a variation on:
    # http://blog.vwelch.com/2011/04/combining-configparser-and-argparse.html

    # For most of our uses of argparse, this should be a trivial
    # drop-in, and should reduce the amount of repetitive code.  There
    # are a couple of special cases which will require attention:
    #
    # - rpki.rtr: The rpki-rtr modules have their own handling of all
    #   the logging setup, and use an argparse subparser.  I -think-
    #   that the way they're already handling the logging setup should
    #   work fine, but there may be a few tricky bits reconciling the
    #   rpki-rtr logging setup with the generalized version in rpki.log.
    #
    # - rpki.rpkic: Use of argparse in rpkic is very complicated due to
    #   support for both the external command line and the internal
    #   command loop.  Overall it works quite well, but the setup is
    #   tricky.  rpki.rpkic.main.top_argparse may need to move outside
    #   the main class, but that may raise its own issues.  Maybe we
    #   can get away with just replacing the current setup of
    #   top_argparser with a call to this function and otherwise
    #   leaving the whole structure alone?  Try and see, I guess.

    # Setting cfg_optional here doesn't really work, because the cfg
    # object returned here is separate from the one that the Django
    # ORM gets when it tries to look for databases.  Given that just
    # about everything which uses this module also uses Django,
    # perhaps we should just resign ourselves to the config being a
    # global thing we read exactly once, so we can stop playing this
    # game.

    topparser = argparse.ArgumentParser(add_help = False)
    topparser.add_argument("-c", "--config",
                           default = os.getenv(rpki_conf_envname, default_filename),
                           help = "override default location of configuration file")

    cfgparser = argparse.ArgumentParser(parents = [topparser], add_help = False)
    cfgparser.add_argument("-h", "--help", action = "store_true")

    args, remaining_argv = cfgparser.parse_known_args()

    argparser = argparse.ArgumentParser(parents = [topparser], description = doc)

    cfg = parser(section       = section,
                 set_filename  = args.config,
                 argparser     = argparser,
                 allow_missing = cfg_optional or args.help)

    return cfg
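
# Illustrative sketch, not part of the original module, of the two-pass
# pattern this function is meant to support (the option names here are
# hypothetical):
#
#   cfg = rpki.config.argparser(section = "rpkid", doc = __doc__)
#   cfg.add_argument("--foreground", action = "store_true",
#                    help = "do not daemonize")
#   cfg.add_logging_arguments()
#   args = cfg.argparser.parse_args()
#   cfg.configure_logging(args = args, ident = "rpkid")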


class Formatter(object):
    """
    Reimplementation (easier than subclassing in this case) of
    logging.Formatter.

    It turns out that the logging code only cares about this class's
    .format(record) method, everything else is internal; so long as
    .format() converts a record into a properly formatted string, the
    logging code is happy.

    So, rather than mess around with dynamically constructing and
    deconstructing and tweaking format strings and ten zillion options
    we don't use, we just provide our own implementation that supports
    what we do need.
    """

    converter = time.gmtime

    def __init__(self, ident, handler, level):
        self.ident = ident
        self.is_syslog = isinstance(handler, logging.handlers.SysLogHandler)
        self.debugging = level == logging.DEBUG

    def format(self, record):
        return "".join(self.coformat(record)).rstrip("\n")

    def coformat(self, record):

        try:
            if not self.is_syslog:
                yield time.strftime("%Y-%m-%d %H:%M:%S ", time.gmtime(record.created))
        except:
            yield "[$!$Time format failed]"

        try:
            yield "{}[{:d}]: ".format(self.ident, record.process)
        except:
            yield "[$!$ident format failed]"

        try:
            if isinstance(record.context, (str, unicode)):
                yield record.context + " "
            else:
                yield repr(record.context) + " "
        except AttributeError:
            pass
        except:
            yield "[$!$context format failed]"

        try:
            yield record.getMessage()
        except:
            yield "[$!$record.getMessage() failed]"

        try:
            if record.exc_info:
                if self.is_syslog or not self.debugging:
                    lines = traceback.format_exception_only(
                        record.exc_info[0], record.exc_info[1])
                    lines.insert(0, ": ")
                else:
                    lines = traceback.format_exception(
                        record.exc_info[0], record.exc_info[1], record.exc_info[2])
                    lines.insert(0, "\n")
                for line in lines:
                    yield line
        except:
            yield "[$!$exception formatting failed]"
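
# Illustrative sketch, not part of the original module: for a non-syslog
# handler, Formatter produces lines shaped roughly like
#
#   2016-01-01 12:00:00 rpkid[1234]: some message text
#
# with the exception text appended (one line, or a full traceback when
# debugging) whenever the record carries exc_info.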