aboutsummaryrefslogtreecommitdiff
path: root/rpki/gui/gui_rpki_cache/util.py
blob: 0bc4fa5dfaf6101ac305030a771a4a8d54207d45 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
# Copyright (C) 2011  SPARTA, Inc. dba Cobham
# Copyright (C) 2012, 2013, 2016  SPARTA, Inc. a Parsons Company
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS.  IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.

__version__ = '$Id: util.py 6335 2016-03-29 03:09:13Z sra $'

import logging
import time
import vobject
from socket import getfqdn
from cStringIO import StringIO

if __name__ == '__main__':
    # When run as a standalone script, configure logging and bootstrap
    # Django *before* the model imports below, since importing the models
    # requires django.setup() to have run.
    import os
    logging.basicConfig(level=logging.DEBUG)
    os.environ.update(DJANGO_SETTINGS_MODULE='rpki.django_settings.gui')
    import django
    django.setup()

import os.path

logger = logging.getLogger(__name__)

from django.db import transaction
import django.db.models

import rpki
import rpki.resource_set
import rpki.left_right
import rpki.gui.app.timestamp
from rpki.gui.app.models import Conf, Alert
from rpki.gui.gui_rpki_cache import models
from rpki.irdb.zookeeper import Zookeeper

from lxml.etree import Element, SubElement


def process_certificate(auth, obj):
    """Process a Resource CA certificate from the validated rcynic cache.

    Returns the gui cache models.Cert for *obj*, creating it on demand.
    If the parent CA cert has not been cached yet, it is processed first
    (recursively).  Router (non-CA) certificates are skipped and None is
    returned for them.

    auth -- rpki.rcynicdb.models.Authenticated instance the object belongs to
    obj  -- rpki.rcynicdb.models.RPKIObject holding the DER certificate
    """
    cert = models.Cert.objects.filter(ski=obj.ski).first()
    if cert:
        logger.debug('cache hit for CA cert uri=%s ski=%s' % (cert.uri, cert.ski))
        return cert # cache hit

    logger.debug('parsing cert at %s' % (obj.uri,))

    x509 = rpki.POW.X509.derRead(obj.der)

    # ensure this is a resource CA Certificate (ignore Router certs)
    bc = x509.getBasicConstraints()
    is_ca = bc is not None and bc[0]
    if not is_ca:
        return None

    # locate the parent certificate
    if obj.aki and obj.aki != obj.ski:
        try:
            issuer = models.Cert.objects.get(ski=obj.aki)
        except models.Cert.DoesNotExist:
            # parent cert not cached yet -- process it first
            issuer = process_certificate(auth, rpki.rcynicdb.models.RPKIObject.objects.get(ski=obj.aki, authenticated=auth))
    else:
        issuer = None # root

    asns, v4, v6 = x509.getRFC3779()

    cert = models.Cert.objects.create(
        uri=obj.uri,
        ski=obj.ski,
        not_before=x509.getNotBefore(),
        not_after=x509.getNotAfter(),
        issuer=issuer
    )

    if issuer is None:
        cert.issuer = cert  # self-signed
        cert.save()

    # RFC 3779 resources are either inherited from the issuer or listed
    # explicitly.  NB: RelatedManager.add() takes individual instances as
    # *args, so querysets must be unpacked -- the previous code passed the
    # queryset itself as a single argument, which Django rejects.
    if asns == 'inherit':
        cert.asns.add(*issuer.asns.all())
    elif asns:
        for asmin, asmax in asns:
            asr, _ = models.ASRange.objects.get_or_create(min=asmin, max=asmax)
            cert.asns.add(asr)

    if v4 == 'inherit':
        cert.addresses.add(*issuer.addresses.all())
    elif v4:
        for v4min, v4max in v4:
            pfx, _ = models.AddressRange.objects.get_or_create(prefix_min=v4min, prefix_max=v4max)
            cert.addresses.add(pfx)

    if v6 == 'inherit':
        cert.addresses_v6.add(*issuer.addresses_v6.all())
    elif v6:
        for v6min, v6max in v6:
            pfx, _ = models.AddressRangeV6.objects.get_or_create(prefix_min=v6min, prefix_max=v6max)
            cert.addresses_v6.add(pfx)

    return cert

def process_roa(auth, obj):
    """Parse a ROA from the validated rcynic cache into the gui cache.

    auth -- rpki.rcynicdb.models.Authenticated instance the object belongs to
    obj  -- rpki.rcynicdb.models.RPKIObject holding the DER ROA
    """
    logger.debug('parsing roa at %s' % (obj.uri,))

    signed_roa = rpki.POW.ROA.derRead(obj.der)
    signed_roa.verify()  # required in order to extract asID
    ee_cert = signed_roa.certs()[0]  # rpki.POW.X509
    ee_aki = ee_cert.getAKI().encode('hex')

    logger.debug('looking for ca cert with ski=%s' % (ee_aki,))

    # Locate the Resource CA cert that issued the EE that signed this ROA
    ca_cert = models.Cert.objects.get(ski=ee_aki)

    roa = models.ROA.objects.create(
        uri=obj.uri,
        asid=signed_roa.getASID(),
        not_before=ee_cert.getNotBefore(),
        not_after=ee_cert.getNotAfter(),
        issuer=ca_cert)

    v4_prefixes, v6_prefixes = signed_roa.getPrefixes()
    for args in (v4_prefixes or ()):
        v = rpki.resource_set.roa_prefix_ipv4(*args)
        pfx4, _ = models.ROAPrefixV4.objects.get_or_create(prefix_min=v.min(), prefix_max=v.max(), max_length=v.max_prefixlen)
        roa.prefixes.add(pfx4)
    for args in (v6_prefixes or ()):
        v = rpki.resource_set.roa_prefix_ipv6(*args)
        pfx6, _ = models.ROAPrefixV6.objects.get_or_create(prefix_min=v.min(), prefix_max=v.max(), max_length=v.max_prefixlen)
        roa.prefixes_v6.add(pfx6)

    return roa

def process_ghostbuster(auth, obj):
    """Parse a Ghostbuster (vCard) record into the gui cache.

    auth -- rpki.rcynicdb.models.Authenticated instance the object belongs to
    obj  -- rpki.rcynicdb.models.RPKIObject holding the DER CMS blob
    """
    logger.debug('parsing ghostbuster at %s' % (obj.uri,))
    cms = rpki.POW.CMS.derRead(obj.der)
    ee_cert = cms.certs()[0]  # rpki.POW.X509
    ee_aki = ee_cert.getAKI().encode('hex')
    vcard = vobject.readOne(cms.verify())

    # Locate the Resource CA cert that issued the EE that signed this record
    ca_cert = models.Cert.objects.get(ski=ee_aki)

    # vCard components are optional; missing ones are stored as NULL
    fn = getattr(vcard, 'fn', None)
    email = getattr(vcard, 'email', None)
    tel = getattr(vcard, 'tel', None)
    org = getattr(vcard, 'org', None)

    gbr = models.Ghostbuster.objects.create(
        uri=obj.uri,
        issuer=ca_cert,
        not_before=ee_cert.getNotBefore(),
        not_after=ee_cert.getNotAfter(),
        full_name=fn.value if fn is not None else None,
        email_address=email.value if email is not None else None,
        telephone=tel.value if tel is not None else None,
        organization=org.value[0] if org is not None else None
    )

    return gbr

@transaction.atomic
def process_cache():
    """Rebuild the gui_rpki_cache tables from the latest rcynic run.

    Runs inside a single database transaction: the previous cache is
    dropped, every object from the most recent authenticated rcynic pass
    is re-imported, and orphaned resource rows are garbage collected.
    """
    logger.info('processing rpki cache')

    # foreign key constraints should cause all other objects to be removed
    models.Cert.objects.all().delete()

    # CA certs are handled first so that ROAs and Ghostbusters can link to
    # their issuing certs via foreign keys
    handlers = (
        ('.cer', process_certificate),
        ('.roa', process_roa),
        ('.gbr', process_ghostbuster),
    )

    auth = rpki.rcynicdb.models.Authenticated.objects.order_by('started').first()

    for suffix, handler in handlers:
        for rpkiobj in auth.rpkiobject_set.filter(uri__endswith=suffix):
            handler(auth, rpkiobj)

    # Garbage collection - remove resource rows whose M2M relations no
    # longer reference any surviving cert/ROA
    gc_targets = (
        (models.ASRange, 'certs'),
        (models.AddressRange, 'certs'),
        (models.AddressRangeV6, 'certs'),
        (models.ROAPrefixV4, 'roas'),
        (models.ROAPrefixV6, 'roas'),
    )
    for model, rel in gc_targets:
        model.objects.annotate(refcount=django.db.models.Count(rel)).filter(refcount=0).delete()


# Maps resource handle -> list of published object URIs that were valid at
# the time of the last fetch.  Filled in by fetch_published_objects() and
# consumed by notify_invalid() to alert on objects which have become invalid.
uris = {}
# Maps rsync URI file extension to the gui cache model that stores that
# object type.
model_map = { '.cer': models.Cert, '.roa': models.ROA, '.gbr': models.Ghostbuster }

def fetch_published_objects():
    """Query rpkid for all objects published by local users, and look up the
    current validation status of each object.  The validation status is used
    later to send alerts for objects which have transitioned to invalid.

    Populates the module-level `uris` dict (handle -> list of currently
    valid published URIs).  If rpkid cannot be reached this is a no-op.
    """
    logger.info('querying for published objects')

    handles = [conf.handle for conf in Conf.objects.all()]
    q_msg = Element(rpki.left_right.tag_msg, nsmap = rpki.left_right.nsmap,
                    type = "query", version = rpki.left_right.version)
    for h in handles:
        SubElement(q_msg, rpki.left_right.tag_list_published_objects, tenant_handle=h, tag=h)
    try:
        z = Zookeeper()
        r_msg = z.call_rpkid(q_msg)
    except Exception as err:
        logger.error('Unable to connect to rpkid to fetch list of published objects')
        logger.exception(err)
        # Should be safe to continue processing the rcynic cache, we just don't do any notifications
        return

    for r_pdu in r_msg:
        if r_pdu.tag == rpki.left_right.tag_list_published_objects:
            # Look up the object in the rcynic cache
            uri = r_pdu.get('uri')
            ext = os.path.splitext(uri)[1]
            if ext in model_map:
                model = model_map[ext]
                handle = r_pdu.get('tenant_handle')

                if model.objects.filter(uri=uri).exists():
                    v = uris.setdefault(handle, [])
                    v.append(uri)
                    logger.debug('adding %s', uri)
                # else: this object is not in the cache.  it was either
                # published recently, or disappeared previously.  if it
                # disappeared previously, it has already been alerted.  in
                # either case, we omit the uri from the list since we are
                # interested only in objects which were valid and are no
                # longer valid
            else:
                logger.debug('skipping object ext=%s uri=%s' % (ext, uri))

        elif r_pdu.tag == rpki.left_right.tag_report_error:
            # bug fix: was logging.error (the module), now uses the module
            # logger like the rest of this file
            logger.error('rpkid reported an error: %s', r_pdu.get("error_code"))


def notify_invalid():
    """Send email alerts to the addresses registered in ghostbuster records for
    any invalid objects that were published by users of this system.

    Compares the URIs recorded by fetch_published_objects() against the
    freshly rebuilt cache; any URI that vanished has become invalid.
    """

    logger.info('sending notifications for invalid objects')

    for handle, published_objects in uris.iteritems():
        missing = []
        for u in published_objects:
            ext = os.path.splitext(u)[1]
            model = model_map[ext]
            if not model.objects.filter(uri=u).exists():
                missing.append(u)

        if missing:
            # bug fix: QuerySet.get() only accepts Q objects positionally;
            # the bare string argument raised TypeError at runtime
            conf = Conf.objects.get(handle=handle)

            msg = StringIO()
            msg.write('This is an alert about problems with objects published by '
                      'the resource handle %s.\n\n' % handle)

            msg.write('The following objects were previously valid, but are '
                      'now invalid:\n')

            for u in missing:
                msg.write('\n')
                msg.write(u)
                msg.write('\n')

            msg.write("""--
You are receiving this email because your address is published in a Ghostbuster
record, or is the default email address for this resource holder account on
%s.""" % getfqdn())

            from_email = 'root@' + getfqdn()
            subj = 'invalid RPKI object alert for resource handle %s' % conf.handle
            conf.send_alert(subj, msg.getvalue(), from_email, severity=Alert.ERROR)


def update_cache():
    """Cache information from the current rcynicdb for display by the gui"""

    t0 = time.time()

    # Fetch published-object state first so notify_invalid() can compare it
    # against the cache rebuilt by process_cache()
    fetch_published_objects()
    process_cache()
    notify_invalid()

    rpki.gui.app.timestamp.update('rcynic_import')

    logger.info('elapsed time %d seconds.', (time.time() - t0))


if __name__ == '__main__':
    # Allow running this module directly to rebuild the gui cache (skips
    # rpkid queries and notifications; see update_cache() for the full run).
    process_cache()