Diffstat (limited to 'rpkid/portal-gui/scripts/rpkigui-rcynic.py')
-rw-r--r--  rpkid/portal-gui/scripts/rpkigui-rcynic.py  363
1 files changed, 184 insertions, 179 deletions
diff --git a/rpkid/portal-gui/scripts/rpkigui-rcynic.py b/rpkid/portal-gui/scripts/rpkigui-rcynic.py
index 3dc0d9bd..3205fc8d 100644
--- a/rpkid/portal-gui/scripts/rpkigui-rcynic.py
+++ b/rpkid/portal-gui/scripts/rpkigui-rcynic.py
@@ -1,5 +1,5 @@
-# $Id$
 # Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions
+# Copyright (C) 2012 SPARTA, Inc. a Parsons Company
 #
 # Permission to use, copy, modify, and distribute this software for any
 # purpose with or without fee is hereby granted, provided that the above
@@ -12,216 +12,212 @@
 # LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
 # OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
 # PERFORMANCE OF THIS SOFTWARE.
-#
+
+__version__ = '$Id$'
 
 default_logfile = '/var/rcynic/data/summary.xml'
 default_root = '/var/rcynic/data'
 
-import time, vobject
+import time
+import vobject
+import logging
+
+from django.db import transaction
+import django.db.models
+from django.core.exceptions import ObjectDoesNotExist
+
+import rpki
+import rpki.gui.app.timestamp
 from rpki.gui.cacheview import models
 from rpki.rcynic import rcynic_xml_iterator, label_iterator
 from rpki.sundial import datetime
-from django.db import transaction
-import django.db.models
 
-debug = False
-fam_map = { 'roa_prefix_set_ipv6': 6, 'roa_prefix_set_ipv4': 4 }
+logger = logging.getLogger(__name__)
 
-class rcynic_object(object):
-    def __call__(self, vs):
-        """
-        do initial processing on a rcynic_object instance.
+def rcynic_cert(cert, obj):
+    obj.sia = cert.sia_directory_uri
 
-        return value is a tuple: first element is a boolean value indicating whether
-        the object is changed/new since the last time we processed it. second
-        element is the db instance.
-        """
-        if debug:
-            print 'processing %s at %s' % (vs.file_class.__name__, vs.uri)
+    # object must be saved for the related manager methods below to work
+    obj.save()
+    # resources can change when a cert is updated
+    obj.asns.clear()
+    obj.addresses.clear()
 
-        # rcynic will generation <validation_status/> elements for objects
-        # listed in the manifest but not found on disk
-        if os.path.exists(vs.filename):
-            q = self.model_class.objects.filter(uri=vs.uri)
+    for asr in cert.resources.asn:
+        logger.debug('processing %s' % asr)
+
+        attrs = {'min': asr.min, 'max': asr.max}
+        q = models.ASRange.objects.filter(**attrs)
+        if not q:
+            obj.asns.create(**attrs)
+        else:
+            obj.asns.add(q[0])
+
+    for cls, addr_obj, addrset in (models.AddressRange, obj.addresses, cert.resources.v4), (models.AddressRangeV6, obj.addresses_v6, cert.resources.v6):
+        for rng in addrset:
+            logger.debug('processing %s' % rng)
+
+            attrs = {'prefix_min': rng.min, 'prefix_max': rng.max}
+            q = cls.objects.filter(**attrs)
             if not q:
-                if debug:
-                    print 'creating new db instance'
-                inst = self.model_class(uri=vs.uri)
+                addr_obj.create(**attrs)
             else:
-                inst = q[0]
+                addr_obj.add(q[0])
+
+
+def rcynic_roa(roa, obj):
+    obj.asid = roa.asID
+    # object must be saved for the related manager methods below to work
+    obj.save()
+    obj.prefixes.clear()
+    obj.prefixes_v6.clear()
+    for pfxset in roa.prefix_sets:
+        if pfxset.__class__.__name__ == 'roa_prefix_set_ipv6':
+            roa_cls = models.ROAPrefixV6
+            prefix_obj = obj.prefixes_v6
+        else:
+            roa_cls = models.ROAPrefixV4
+            prefix_obj = obj.prefixes
+
+        for pfx in pfxset:
+            attrs = {'prefix_min': pfx.min(),
+                     'prefix_max': pfx.max(),
+                     'max_length': pfx.max_prefixlen}
+            q = roa_cls.objects.filter(**attrs)
+            if not q:
+                prefix_obj.create(**attrs)
+            else:
+                prefix_obj.add(q[0])
 
-            # determine if the object is changed/new
-            mtime = os.stat(vs.filename)[8]
-            if mtime != inst.mtime:
-                inst.mtime = mtime
-                obj = vs.obj # causes object to be lazily loaded
-                inst.not_before = obj.notBefore.to_sql()
-                inst.not_after = obj.notAfter.to_sql()
-                if debug:
-                    sys.stderr.write('name=%s ski=%s\n' % (obj.subject, obj.ski))
-                inst.name = obj.subject
-                inst.keyid = obj.ski
-                # look up signing cert
-                if obj.issuer == obj.subject:
-                    # self-signed cert (TA)
-                    inst.cert = inst
-                else:
-                    q = models.Cert.objects.filter(keyid=obj.aki, name=obj.issuer)
-                    if q:
-                        inst.issuer = q[0]
-                    else:
-                        sys.stderr.write('warning: unable to find signing cert with ski=%s (%s)\n' % (obj.aki, obj.issuer))
-                        return None
-
-                self.callback(obj, inst)
-            else:
-                if debug:
-                    print 'object is unchanged'
+def rcynic_gbr(gbr, obj):
+    vcard = vobject.readOne(gbr.vcard)
+    logger.debug(vcard.prettyPrint())
+    obj.full_name = vcard.fn.value if hasattr(vcard, 'fn') else None
+    obj.email_address = vcard.email.value if hasattr(vcard, 'email') else None
+    obj.telephone = vcard.tel.value if hasattr(vcard, 'tel') else None
+    obj.organization = vcard.org.value[0] if hasattr(vcard, 'org') else None
+
+LABEL_CACHE = {}
 
-            # save required to create new ValidationStatus object refering to it
-            inst.save()
-            inst.statuses.create(generation=models.generations_dict[vs.generation] if vs.generation else None,
-                                 timestamp=datetime.fromXMLtime(vs.timestamp).to_sql(),
-                                 status=models.ValidationLabel.objects.get(label=vs.status))
-            return inst
+def save_statuses(inst, statuses):
+    for vs in statuses:
+        timestamp = datetime.fromXMLtime(vs.timestamp).to_sql()
+
+        # cache validation labels
+        if vs.status in LABEL_CACHE:
+            status = LABEL_CACHE[vs.status]
         else:
-            if debug:
-                print 'ERROR - file is missing: %s' % vs.filename
+            status = models.ValidationLabel.objects.get(label=vs.status)
+            LABEL_CACHE[vs.status] = status
+
+        g = models.generations_dict[vs.generation] if vs.generation else None
 
-        return True
+        inst.statuses.create(generation=g, timestamp=timestamp, status=status)
 
-class rcynic_cert(rcynic_object):
-    model_class = models.Cert
+@transaction.commit_on_success
+def process_cache(root, xml_file):
+    dispatch = {
+        'rcynic_certificate': rcynic_cert,
+        'rcynic_roa': rcynic_roa,
+        'rcynic_ghostbuster': rcynic_gbr
+    }
+    model_class = {
+        'rcynic_certificate': models.Cert,
+        'rcynic_roa': models.ROA,
+        'rcynic_ghostbuster': models.Ghostbuster
+    }
 
-    def callback(self, cert, obj):
-        """
-        Process a RPKI resource certificate.
-        """
+    last_uri = None
+    statuses = []
 
-        obj.sia = cert.sia_directory_uri
-        obj.save()
+    logger.info('clearing validation statuses')
+    models.ValidationStatus.objects.all().delete()
 
-        # resources can change when a cert is updated
-        obj.asns.clear()
-        obj.addresses.clear()
+    logger.info('updating validation status')
+    for vs in rcynic_xml_iterator(root, xml_file):
+        if vs.uri != last_uri:
+            if statuses:
+                obj, created = models.RepositoryObject.objects.get_or_create(uri=last_uri)
+                save_statuses(obj, statuses)
 
-        for asr in cert.resources.asn:
-            if debug:
-                sys.stderr.write('processing %s\n' % asr)
+            statuses = []
+            last_uri = vs.uri
 
-            attrs = { 'min': asr.min, 'max': asr.max }
-            q = models.ASRange.objects.filter(**attrs)
+        statuses.append(vs)
+
+        if vs.status == 'object_accepted':
+            logger.debug('processing %s' % vs.filename)
+
+            cls = model_class[vs.file_class.__name__]
+            q = cls.objects.filter(repo__uri=vs.uri)
             if not q:
-                obj.asns.create(**attrs)
+                repo, created = models.RepositoryObject.objects.get_or_create(uri=vs.uri)
+                inst = cls(repo=repo)
             else:
-                obj.asns.add(q[0])
+                inst = q[0]
 
-        for family, addrset in (4, cert.resources.v4), (6, cert.resources.v6):
-            for rng in addrset:
-                if debug:
-                    sys.stderr.write('processing %s\n' % rng)
+            # determine if the object is changed/new
+            mtime = os.stat(vs.filename)[8]
+            if mtime != inst.mtime:
+                inst.mtime = mtime
+                try:
+                    obj = vs.obj # causes object to be lazily loaded
+                except rpki.POW._der.DerError, e:
+                    logger.warning('Caught %s while processing %s: %s' % (type(e), vs.filename, e))
+                    continue
 
-                attrs = { 'family': family, 'min': str(rng.min), 'max': str(rng.max) }
-                q = models.AddressRange.objects.filter(**attrs)
-                if not q:
-                    obj.addresses.create(**attrs)
-                else:
-                    obj.addresses.add(q[0])
-
-        if debug:
-            print 'finished processing rescert at %s' % cert.uri
-
-class rcynic_roa(rcynic_object):
-    model_class = models.ROA
-
-    def callback(self, roa, obj):
-        obj.asid = roa.asID
-        obj.save()
-        obj.prefixes.clear()
-        for pfxset in roa.prefix_sets:
-            family = fam_map[pfxset.__class__.__name__]
-            for pfx in pfxset:
-                attrs = { 'family' : family,
-                          'prefix': str(pfx.prefix),
-                          'bits' : pfx.prefixlen,
-                          'max_length': pfx.max_prefixlen }
-                q = models.ROAPrefix.objects.filter(**attrs)
-                if not q:
-                    obj.prefixes.create(**attrs)
-                else:
-                    obj.prefixes.add(q[0])
+                inst.not_before = obj.notBefore.to_sql()
+                inst.not_after = obj.notAfter.to_sql()
+                inst.name = obj.subject
+                inst.keyid = obj.ski
 
-class rcynic_gbr(rcynic_object):
-    model_class = models.Ghostbuster
+                # look up signing cert
+                if obj.issuer == obj.subject:
+                    # self-signed cert (TA)
+                    assert(isinstance(inst, models.Cert))
+                    inst.issuer = inst
+                else:
+                    try:
+                        inst.issuer = models.Cert.objects.get(keyid=obj.aki, name=obj.issuer)
+                    except ObjectDoesNotExist:
+                        logger.warning('unable to find signing cert with ski=%s (%s)' % (obj.aki, obj.issuer))
+                        continue
 
-    def callback(self, gbr, obj):
-        vcard = vobject.readOne(gbr.vcard)
-        if debug:
-            vcard.prettyPrint()
-        obj.full_name = vcard.fn.value if hasattr(vcard, 'fn') else None
-        obj.email_address = vcard.email.value if hasattr(vcard, 'email') else None
-        obj.telephone = vcard.tel.value if hasattr(vcard, 'tel') else None
-        obj.organization = vcard.org.value[0] if hasattr(vcard, 'org') else None
+                # do object-specific tasks
+                dispatch[vs.file_class.__name__](obj, inst)
 
-def process_cache(root, xml_file):
-    start = time.time()
+                inst.save() # don't require a save in the dispatch methods
+            else:
+                logger.debug('object is unchanged')
 
-    dispatch = {
-        'rcynic_certificate': rcynic_cert(),
-        'rcynic_roa' : rcynic_roa(),
-        'rcynic_ghostbuster': rcynic_gbr()
-    }
+            # insert the saved validation statuses now that the object has been
+            # created.
+            save_statuses(inst.repo, statuses)
+            statuses = []
 
-    # remove all existing ValidationStatus_* entries
-    models.ValidationStatus_Cert.objects.all().delete()
-    models.ValidationStatus_ROA.objects.all().delete()
-    models.ValidationStatus_Ghostbuster.objects.all().delete()
-
-    # loop over all rcynic objects and dispatch based on the returned
-    # rcynic_object subclass
-    n = 1
-    defer = rcynic_xml_iterator(root, xml_file)
-    while defer:
-        if debug:
-            print 'starting iteration %d for deferred objects' % n
-        n = n + 1
-
-        elts = defer
-        defer = []
-        for vs in elts:
-            # need to defer processing this object, most likely because
-            # the <validation_status/> element for the signing cert hasn't
-            # been seen yet
-            if not dispatch[vs.file_class.__name__](vs):
-                defer.append(vs)
+    # process any left over statuses for an object that was not ultimately
+    # accepted
+    if statuses:
+        obj, created = models.RepositoryObject.objects.get_or_create(uri=last_uri)
+        save_statuses(obj, statuses)
 
     # garbage collection
     # remove all objects which have no ValidationStatus references, which
     # means they did not appear in the last XML output
-    if debug:
-        print 'performing garbage collection'
+    logger.info('performing garbage collection')
 
-    # trying to .delete() the querysets directly results in a "too many sql variables" exception
-    for qs in (models.Cert.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0),
-               models.Ghostbuster.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0),
-               models.ROA.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0)):
-        for e in qs:
-            e.delete()
-
-    if debug:
-        stop = time.time()
-        sys.stdout.write('elapsed time %d seconds.\n' % (stop - start))
+    # Delete all objects that have zero validation status elements.
+    models.RepositoryObject.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0).delete()
 
+@transaction.commit_on_success
 def process_labels(xml_file):
-    if debug:
-        sys.stderr.write('updating labels...\n')
+    logger.info('updating labels...')
 
     for label, kind, desc in label_iterator(xml_file):
-        if debug:
-            sys.stderr.write('label=%s kind=%s desc=%s\n' % (label, kind, desc))
+        logger.debug('label=%s kind=%s desc=%s' % (label, kind, desc))
         if kind:
             q = models.ValidationLabel.objects.filter(label=label)
             if not q:
@@ -233,24 +229,33 @@ def process_labels(xml_file):
             obj.status = desc
             obj.save()
 
+
 if __name__ == '__main__':
     import optparse
 
     parser = optparse.OptionParser()
-    parser.add_option("-d", "--debug", action="store_true",
-            help="enable debugging message")
+    parser.add_option("-l", "--level", dest="log_level", default='INFO',
+                      help="specify the logging level [default: %default]")
     parser.add_option("-f", "--file", dest="logfile",
-            help="specify the rcynic XML file to parse [default: %default]",
-            default=default_logfile)
+                      help="specify the rcynic XML file to parse [default: %default]",
+                      default=default_logfile)
     parser.add_option("-r", "--root",
-            help="specify the chroot directory for the rcynic jail [default: %default]",
-            metavar="DIR", default=default_root)
+                      help="specify the chroot directory for the rcynic jail [default: %default]",
+                      metavar="DIR", default=default_root)
     options, args = parser.parse_args(sys.argv)
 
-    if options.debug:
-        debug = True
-    with transaction.commit_on_success():
-        process_labels(options.logfile)
-        process_cache(options.root, options.logfile)
+    v = getattr(logging, options.log_level.upper())
+    logger.setLevel(v)
+    logging.basicConfig()
+    logger.info('log level set to %s' % logging.getLevelName(v))
+
+    start = time.time()
+    process_labels(options.logfile)
+    process_cache(options.root, options.logfile)
+
+    rpki.gui.app.timestamp.update('rcynic_import')
+
+    stop = time.time()
+    logger.info('elapsed time %d seconds.' % (stop - start))
 
-# vim:sw=4 ts=8
+    logging.shutdown()
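For reference, a minimal invocation of the updated importer might look like the sketch below. This is not part of the commit: it assumes the portal-gui Django settings are already available in the environment, the DEBUG level is only an illustration, and the --file and --root values simply repeat the script defaults shown in the diff.

    python rpkigui-rcynic.py --level DEBUG \
        --file /var/rcynic/data/summary.xml \
        --root /var/rcynic/data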