Diffstat (limited to 'rpkid/portal-gui/scripts/rpkigui-rcynic.py')
-rw-r--r-- | rpkid/portal-gui/scripts/rpkigui-rcynic.py | 294
1 file changed, 155 insertions, 139 deletions
diff --git a/rpkid/portal-gui/scripts/rpkigui-rcynic.py b/rpkid/portal-gui/scripts/rpkigui-rcynic.py
index da75aa8f..3c738675 100644
--- a/rpkid/portal-gui/scripts/rpkigui-rcynic.py
+++ b/rpkid/portal-gui/scripts/rpkigui-rcynic.py
@@ -1,4 +1,5 @@
 # Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions
+# Copyright (C) 2012 SPARTA, Inc. a Parsons Company
 #
 # Permission to use, copy, modify, and distribute this software for any
 # purpose with or without fee is hereby granted, provided that the above
@@ -23,6 +24,7 @@ import logging
 
 from django.db import transaction
 import django.db.models
+from django.core.exceptions import ObjectDoesNotExist
 
 import rpki
 import rpki.gui.app.timestamp
@@ -33,18 +35,135 @@ from rpki.sundial import datetime
 
 logger = logging.getLogger(__name__)
 
-class rcynic_object(object):
-    def __call__(self, vs):
-        """Do initial processing on a rcynic_object instance."""
-        logger.debug('processing %s at %s' % (vs.file_class.__name__, vs.uri))
+def rcynic_cert(cert, obj):
+    obj.sia = cert.sia_directory_uri
+    logger.debug('issuer=%s' % obj.issuer)
 
-        # rcynic will generation <validation_status/> elements for objects
-        # listed in the manifest but not found on disk
-        if os.path.exists(vs.filename):
-            q = self.model_class.objects.filter(uri=vs.uri)
+    # object must be saved for the related manager methods below to work
+    obj.save()
+    # resources can change when a cert is updated
+    obj.asns.clear()
+    obj.addresses.clear()
+
+    for asr in cert.resources.asn:
+        logger.debug('processing %s' % asr)
+
+        attrs = {'min': asr.min, 'max': asr.max}
+        q = models.ASRange.objects.filter(**attrs)
+        if not q:
+            obj.asns.create(**attrs)
+        else:
+            obj.asns.add(q[0])
+
+    for cls, addr_obj, addrset in (models.AddressRange, obj.addresses, cert.resources.v4), (models.AddressRangeV6, obj.addresses_v6, cert.resources.v6):
+        for rng in addrset:
+            logger.debug('processing %s' % rng)
+
+            attrs = {'prefix_min': rng.min, 'prefix_max': rng.max}
+            q = cls.objects.filter(**attrs)
+            if not q:
+                addr_obj.create(**attrs)
+            else:
+                addr_obj.add(q[0])
+
+
+def rcynic_roa(roa, obj):
+    obj.asid = roa.asID
+    # object must be saved for the related manager methods below to work
+    obj.save()
+    obj.prefixes.clear()
+    obj.prefixes_v6.clear()
+    for pfxset in roa.prefix_sets:
+        if pfxset.__class__.__name__ == 'roa_prefix_set_ipv6':
+            roa_cls = models.ROAPrefixV6
+            prefix_obj = obj.prefixes_v6
+        else:
+            roa_cls = models.ROAPrefixV4
+            prefix_obj = obj.prefixes
+
+        for pfx in pfxset:
+            attrs = {'prefix_min': pfx.min(),
+                     'prefix_max': pfx.max(),
+                     'max_length': pfx.max_prefixlen}
+            q = roa_cls.objects.filter(**attrs)
+            if not q:
+                prefix_obj.create(**attrs)
+            else:
+                prefix_obj.add(q[0])
+
+
+def rcynic_gbr(gbr, obj):
+    vcard = vobject.readOne(gbr.vcard)
+    logger.debug(vcard.prettyPrint())
+    obj.full_name = vcard.fn.value if hasattr(vcard, 'fn') else None
+    obj.email_address = vcard.email.value if hasattr(vcard, 'email') else None
+    obj.telephone = vcard.tel.value if hasattr(vcard, 'tel') else None
+    obj.organization = vcard.org.value[0] if hasattr(vcard, 'org') else None
+
+LABEL_CACHE = {}
+
+
+def save_statuses(inst, statuses):
+    for vs in statuses:
+        timestamp = datetime.fromXMLtime(vs.timestamp).to_sql()
+
+        # cache validation labels
+        if vs.status in LABEL_CACHE:
+            status = LABEL_CACHE[vs.status]
+        else:
+            status = models.ValidationLabel.objects.get(label=vs.status)
+            LABEL_CACHE[vs.status] = status
+
+        g = models.generations_dict[vs.generation] if vs.generation else None
+
+        inst.statuses.create(generation=g,
+                             timestamp=timestamp, status=status)
+
+
+@transaction.commit_on_success
+def process_cache(root, xml_file):
+    dispatch = {
+        'rcynic_certificate': rcynic_cert,
+        'rcynic_roa': rcynic_roa,
+        'rcynic_ghostbuster': rcynic_gbr
+    }
+    model_class = {
+        'rcynic_certificate': models.Cert,
+        'rcynic_roa': models.ROA,
+        'rcynic_ghostbuster': models.Ghostbuster
+    }
+
+    last_uri = None
+    statuses = []
+
+    logger.info('clearing validation statuses')
+    models.ValidationStatus.objects.all().delete()
+
+    logger.info('updating validation status')
+    for vs in rcynic_xml_iterator(root, xml_file):
+        if vs.uri != last_uri:
+            if statuses:
+                obj, created = models.RepositoryObject.objects.get_or_create(uri=last_uri)
+                save_statuses(obj, statuses)
+
+            statuses = []
+            last_uri = vs.uri
+
+        statuses.append(vs)
+
+        if vs.status == 'object_accepted':
+            logger.debug('processing %s at %s' % (vs.filename, vs.uri))
+
+            # rcynic will generation <validation_status/> elements for objects
+            # listed in the manifest but not found on disk
+            if not os.path.exists(vs.filename):
+                logger.warning('file is missing: %s' % vs.filename)
+                continue
+
+            cls = model_class[vs.file_class.__name__]
+            q = cls.objects.filter(repo__uri=vs.uri)
             if not q:
                 logger.debug('creating new db instance')
-                inst = self.model_class(uri=vs.uri)
+                repo, created = models.RepositoryObject.objects.get_or_create(uri=vs.uri)
+                inst = cls(repo=repo)
             else:
                 inst = q[0]
@@ -56,7 +175,7 @@ class rcynic_object(object):
                     obj = vs.obj  # causes object to be lazily loaded
                 except rpki.POW._der.DerError, e:
                     logger.warning('Caught %s while processing %s: %s' % (type(e), vs.filename, e))
-                    return True
+                    continue
 
                 inst.not_before = obj.notBefore.to_sql()
                 inst.not_after = obj.notAfter.to_sql()
@@ -67,148 +186,41 @@ class rcynic_object(object):
                 # look up signing cert
                 if obj.issuer == obj.subject:
                     # self-signed cert (TA)
-                    inst.cert = inst
+                    logger.debug('processing TA at %s' % vs.uri)
+                    assert(isinstance(inst, models.Cert))
+                    inst.issuer = inst
                 else:
-                    q = models.Cert.objects.filter(keyid=obj.aki, name=obj.issuer)
-                    if q:
-                        inst.issuer = q[0]
-                    else:
+                    try:
+                        inst.issuer = models.Cert.objects.get(keyid=obj.aki, name=obj.issuer)
+                    except ObjectDoesNotExist:
                         logger.warning('unable to find signing cert with ski=%s (%s)' % (obj.aki, obj.issuer))
-                        return None
-
-                self.callback(obj, inst)
-            else:
-                logger.debug('object is unchanged')
-
-            # save required to create new ValidationStatus object refering to
-            # it
-            inst.save()
-            inst.statuses.create(generation=models.generations_dict[vs.generation] if vs.generation else None,
-                                 timestamp=datetime.fromXMLtime(vs.timestamp).to_sql(),
-                                 status=models.ValidationLabel.objects.get(label=vs.status))
+                        continue
 
-            return inst
-        else:
-            logger.warning('file is missing: %s' % vs.filename)
-
-            return True
-
-
-class rcynic_cert(rcynic_object):
-    model_class = models.Cert
-
-    def callback(self, cert, obj):
-        """
-        Process a RPKI resource certificate.
-        """
+                # do object-specific tasks
+                dispatch[vs.file_class.__name__](obj, inst)
 
-        obj.sia = cert.sia_directory_uri
-        obj.save()
-
-        # resources can change when a cert is updated
-        obj.asns.clear()
-        obj.addresses.clear()
-
-        for asr in cert.resources.asn:
-            logger.debug('processing %s' % asr)
-
-            attrs = {'min': asr.min, 'max': asr.max}
-            q = models.ASRange.objects.filter(**attrs)
-            if not q:
-                obj.asns.create(**attrs)
+                inst.save()  # don't require a save in the dispatch methods
             else:
-                obj.asns.add(q[0])
-
-        for cls, addr_obj, addrset in (models.AddressRange, obj.addresses, cert.resources.v4), (models.AddressRangeV6, obj.addresses_v6, cert.resources.v6):
-            for rng in addrset:
-                logger.debug('processing %s' % rng)
-
-                attrs = {'prefix_min': rng.min, 'prefix_max': rng.max}
-                q = cls.objects.filter(**attrs)
-                if not q:
-                    addr_obj.create(**attrs)
-                else:
-                    addr_obj.add(q[0])
-
-        logger.debug('finished processing rescert at %s' % cert.uri)
-
-
-class rcynic_roa(rcynic_object):
-    model_class = models.ROA
-
-    def callback(self, roa, obj):
-        obj.asid = roa.asID
-        obj.save()
-        obj.prefixes.clear()
-        obj.prefixes_v6.clear()
-        for pfxset in roa.prefix_sets:
-            if pfxset.__class__.__name__ == 'roa_prefix_set_ipv6':
-                roa_cls = models.ROAPrefixV6
-                prefix_obj = obj.prefixes_v6
-            else:
-                roa_cls = models.ROAPrefixV4
-                prefix_obj = obj.prefixes
-
-            for pfx in pfxset:
-                attrs = {'prefix_min': pfx.min(),
-                         'prefix_max': pfx.max(),
-                         'max_length': pfx.max_prefixlen}
-                q = roa_cls.objects.filter(**attrs)
-                if not q:
-                    prefix_obj.create(**attrs)
-                else:
-                    prefix_obj.add(q[0])
-
-
-class rcynic_gbr(rcynic_object):
-    model_class = models.Ghostbuster
-
-    def callback(self, gbr, obj):
-        vcard = vobject.readOne(gbr.vcard)
-        logger.debug(vcard.prettyPrint())
-        obj.full_name = vcard.fn.value if hasattr(vcard, 'fn') else None
-        obj.email_address = vcard.email.value if hasattr(vcard, 'email') else None
-        obj.telephone = vcard.tel.value if hasattr(vcard, 'tel') else None
-        obj.organization = vcard.org.value[0] if hasattr(vcard, 'org') else None
-
-
-def process_cache(root, xml_file):
-    start = time.time()
+                logger.debug('object is unchanged')
 
-    dispatch = {
-        'rcynic_certificate': rcynic_cert(),
-        'rcynic_roa': rcynic_roa(),
-        'rcynic_ghostbuster': rcynic_gbr()
-    }
+            # insert the saved validation statuses now that the object has been
+            # created.
+            save_statuses(inst.repo, statuses)
+            statuses = []
 
-    # remove all existing ValidationStatus_* entries
-    logger.info('removing existing validation status')
-    models.ValidationStatus_Cert.objects.all().delete()
-    models.ValidationStatus_ROA.objects.all().delete()
-    models.ValidationStatus_Ghostbuster.objects.all().delete()
-
-    logger.info('updating validation status')
-    elts = rcynic_xml_iterator(root, xml_file)
-    for vs in elts:
-        with transaction.commit_on_success():
-            dispatch[vs.file_class.__name__](vs)
+    # process any left over statuses for an object that was not ultimately
+    # accepted
+    if statuses:
+        obj, created = models.RepositoryObject.objects.get_or_create(uri=last_uri)
+        save_statuses(obj, statuses)
 
     # garbage collection
     # remove all objects which have no ValidationStatus references, which
    # means they did not appear in the last XML output
     logger.info('performing garbage collection')
-    # trying to .delete() the querysets directly results in a "too many sql
-    # variables" exception
-    for qs in (models.Cert.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0),
-               models.Ghostbuster.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0),
-               models.ROA.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0)):
-        for e in qs:
-            e.delete()
-
-    stop = time.time()
-    logger.info('elapsed time %d seconds.' % (stop - start))
-
+    # Delete all objects that have zero validation status elements.
+    models.RepositoryObject.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0).delete()
 
 @transaction.commit_on_success
 def process_labels(xml_file):
@@ -247,9 +259,13 @@ if __name__ == '__main__':
     logging.basicConfig()
     logger.info('log level set to %s' % logging.getLevelName(v))
 
+    start = time.time()
     process_labels(options.logfile)
     process_cache(options.root, options.logfile)
     rpki.gui.app.timestamp.update('rcynic_import')
+    stop = time.time()
+    logger.info('elapsed time %d seconds.' % (stop - start))
+
     logging.shutdown()
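The rewritten process_cache() buffers <validation_status/> entries until the URI changes and then writes the whole batch against a single RepositoryObject, while save_statuses() memoizes ValidationLabel lookups in LABEL_CACHE. The sketch below illustrates that group-by-URI batching and label caching with plain dictionaries standing in for the Django models; the record layout and the REPOSITORY, lookup_label() and flush_statuses() names are invented for illustration and are not part of the commit.

# Group-by-URI batching with a label cache, shown with plain dicts.
# LABEL_CACHE mirrors the module-level cache in the commit; REPOSITORY,
# lookup_label() and flush_statuses() are invented stand-ins for the
# Django models and their managers.

LABEL_CACHE = {}   # status label -> cached "label object" (here just a string)
REPOSITORY = {}    # uri -> list of (label, generation) tuples


def lookup_label(label):
    # stand-in for models.ValidationLabel.objects.get(label=...)
    if label not in LABEL_CACHE:
        LABEL_CACHE[label] = label.upper()
    return LABEL_CACHE[label]


def flush_statuses(uri, statuses):
    # stand-in for RepositoryObject.objects.get_or_create() followed by
    # inst.statuses.create(...) for each buffered status
    repo = REPOSITORY.setdefault(uri, [])
    for label, generation in statuses:
        repo.append((lookup_label(label), generation))


def process(records):
    # records: iterable of (uri, label, generation) tuples grouped by uri,
    # like the <validation_status/> stream rcynic writes
    last_uri = None
    statuses = []
    for uri, label, generation in records:
        if uri != last_uri:
            if statuses:
                flush_statuses(last_uri, statuses)
            statuses = []
            last_uri = uri
        statuses.append((label, generation))
    # left-over batch for the final URI, as in the tail of process_cache()
    if statuses:
        flush_statuses(last_uri, statuses)


if __name__ == '__main__':
    process([('rsync://example.net/a.cer', 'object_accepted', 'current'),
             ('rsync://example.net/a.cer', 'rsync_succeeded', None),
             ('rsync://example.net/b.roa', 'object_rejected', 'backup')])
    print(REPOSITORY)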
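The commit also replaces the rcynic_object class hierarchy with plain handler functions chosen from the dispatch and model_class dictionaries keyed on the rcynic file class name. Below is a minimal, self-contained sketch of that dispatch-table pattern; the record classes and handlers are made up for illustration and are not the real rcynic or Django types.

# Dispatch-table sketch: a handler is chosen by class name, the way
# process_cache() picks dispatch[vs.file_class.__name__] and
# model_class[vs.file_class.__name__].  Everything below is invented
# for illustration.

class rcynic_certificate(object):
    pass


class rcynic_roa(object):
    pass


def handle_cert(record):
    return 'certificate handler ran'


def handle_roa(record):
    return 'ROA handler ran'


dispatch = {
    'rcynic_certificate': handle_cert,
    'rcynic_roa': handle_roa,
}


def process(record):
    # the commit keys on vs.file_class.__name__; this sketch keys on the
    # record's own class name, which is the same selection idea
    handler = dispatch[record.__class__.__name__]
    return handler(record)


if __name__ == '__main__':
    print(process(rcynic_certificate()))
    print(process(rcynic_roa()))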
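rcynic_gbr() pulls contact details out of a Ghostbuster record's vCard with vobject, guarding each optional property with hasattr(). The standalone sketch below shows the same parsing approach on an invented sample vCard; it assumes the third-party vobject package is installed and is not taken from the commit.

# Parse a vCard the way rcynic_gbr() does, with hasattr() guards for
# optional properties.  The sample vCard is invented for illustration.
import vobject

SAMPLE_VCARD = "\r\n".join([
    "BEGIN:VCARD",
    "VERSION:3.0",
    "FN:Human Contact",
    "ORG:Example Org",
    "EMAIL:contact@example.net",
    "TEL:+1-555-0100",
    "END:VCARD",
])


def parse_ghostbuster(vcard_text):
    vcard = vobject.readOne(vcard_text)
    return {
        'full_name': vcard.fn.value if hasattr(vcard, 'fn') else None,
        'email_address': vcard.email.value if hasattr(vcard, 'email') else None,
        'telephone': vcard.tel.value if hasattr(vcard, 'tel') else None,
        'organization': vcard.org.value[0] if hasattr(vcard, 'org') else None,
    }


if __name__ == '__main__':
    print(parse_ghostbuster(SAMPLE_VCARD))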