# $Id$
# Copyright (C) 2011  SPARTA, Inc. dba Cobham Analytic Solutions
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS.  IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
#

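# This script parses rcynic's XML output and loads the validated objects
# (resource certificates, ROAs and Ghostbuster records) into the
# rpki.gui.cacheview Django models, then garbage collects db entries for
# objects no longer present in the rcynic cache.
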
default_logfile = '/var/rcynic/data/rcynic.xml'
default_root = '/var/rcynic/data'

import os, sys, time, vobject
os.environ['DJANGO_SETTINGS_MODULE'] = 'rpki.gui.settings'

from rpki.gui.cacheview import models
from rpki.rcynic import rcynic_xml_iterator, label_iterator
from rpki.sundial import datetime
from django.db import transaction, IntegrityError

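# module-level debug flag, set to True by the -d/--debug command line option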
debug = False

def process_object(obj, model_class):
    """
    do initial processing on a rcynic_object instance.

    return value is a tuple: first element is a boolean value indicating whether
    the object is changed/new since the last time we processed it.  second
    element is the db instance.
    """
    if debug:
        print 'processing %s at %s' % (obj.__class__.__name__, obj.uri)

    q = model_class.objects.filter(uri=obj.uri)
    if not q:
        if debug:
            print 'creating new db instance'
        inst = model_class(uri=obj.uri)
    else:
        inst = q[0]

    # metadata that is updated on every run, regardless of whether the object
    # has changed
    inst.ok = obj.ok
    inst.status = models.ValidationStatus.objects.get(label=obj.status)
    inst.timestamp = datetime.fromXMLtime(obj.timestamp).to_sql()

    # determine if the object is changed/new
    mtime = os.stat(obj.filename)[8]  # st_mtime
    if mtime != inst.mtime:
        inst.mtime = mtime
        inst.not_before = obj.notBefore.to_sql()
        inst.not_after = obj.notAfter.to_sql()
        if debug:
            sys.stderr.write('name=%s ski=%s\n' % (obj.subject, obj.ski))
        inst.name = obj.subject
        inst.keyid = obj.ski

        # look up signing cert
        if obj.issuer == obj.subject:
            # self-signed cert (TA) is its own issuer
            inst.issuer = inst
        else:
            q = models.Cert.objects.filter(keyid=obj.aki)
            if q:
                inst.issuer = q[0]
            else:
                sys.stderr.write('warning: unable to find signing cert with ski=%s (%s)\n' % (obj.aki, obj.issuer))

        return True, inst
    elif debug:
        print 'object is unchanged'

    # the object itself is unchanged, but the per-run metadata set above
    # (ok, status, timestamp) still needs to be saved
    inst.save()

    return False, inst

def process_rescert(cert):
    """
    Process an RPKI resource certificate.
    """

    refresh, obj = process_object(cert, models.Cert)

    if refresh:
        obj.save()

        # resources can change when a cert is updated
        obj.asns.clear()
        obj.addresses.clear()

        transaction.enter_transaction_management()
        transaction.managed()
        for asr in cert.resources.asn:
            if debug:
                sys.stderr.write('processing %s\n' % asr)

            attrs = { 'min': asr.min, 'max': asr.max }
            q = models.ASRange.objects.filter(**attrs)
            if not q:
                obj.asns.create(**attrs)
            else:
                obj.asns.add(q[0])

        for family, addrset in (4, cert.resources.v4), (6, cert.resources.v6):
            for rng in addrset:
                if debug:
                    sys.stderr.write('processing %s\n' % rng)

                attrs = { 'family': family, 'min': str(rng.min), 'max': str(rng.max) }
                q = models.AddressRange.objects.filter(**attrs)
                if not q:
                    obj.addresses.create(**attrs)
                else:
                    obj.addresses.add(q[0])
        transaction.commit()
        transaction.leave_transaction_management()

    if debug:
        print 'finished processing rescert at %s' % cert.uri

    return obj

def process_ghostbuster(gbr):
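    """
    Process a Ghostbuster record: parse its vCard payload and store the
    full name, email address, telephone and organization fields.
    """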
    refresh, obj = process_object(gbr, models.Ghostbuster)

    if refresh:
        vcard = vobject.readOne(gbr.vcard)
        if debug:
            vcard.prettyPrint()
        obj.full_name = vcard.fn.value if hasattr(vcard, 'fn') else None
        obj.email_address = vcard.email.value if hasattr(vcard, 'email') else None
        obj.telephone = vcard.tel.value if hasattr(vcard, 'tel') else None
        obj.organization = vcard.org.value[0] if hasattr(vcard, 'org') else None
        obj.save()

    # return the db instance so process_cache() can read its timestamp,
    # as process_rescert() and process_roa() do
    return obj

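# map the rcynic prefix set class name to its address family number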
fam_map = { 'roa_prefix_set_ipv6': 6, 'roa_prefix_set_ipv4': 4 }

def process_roa(roa):
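    """
    Process a ROA: store its asID and rebuild its set of prefixes.
    """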
    refresh, obj = process_object(roa, models.ROA)

    if refresh:
        obj.asid = roa.asID
        obj.save()
        obj.prefixes.clear()
        for pfxset in roa.prefix_sets:
            family = fam_map[pfxset.__class__.__name__]
            for pfx in pfxset:
                attrs = { 'family' : family,
                          'prefix': str(pfx.prefix),
                          'bits' : pfx.prefixlen,
                          'max_length': pfx.max_prefixlen }
                q = models.ROAPrefix.objects.filter(**attrs)
                if not q:
                    obj.prefixes.create(**attrs)
                else:
                    obj.prefixes.add(q[0])

    return obj

def trydelete(seq):
    """
    Iterate over a sequence and attempt to delete each item.  Safely
    ignore IntegrityError, since the object may still be referenced elsewhere.
    """
    for o in seq:
        try:
            o.delete()
        except IntegrityError:
            pass

def garbage_collect(ts):
    """
    rcynic's XML output file tells us what is currently in the cache,
    but not what has been removed.  We save the timestamp from the first
    entry in the XML file and remove all objects that are older.
    """
    if debug:
        print 'doing garbage collection'

    for roa in models.ROA.objects.filter(timestamp__lt=ts):
        if debug:
            sys.stderr.write('removing %s\n' % roa.uri)
        trydelete(roa.prefixes.all())
        roa.delete()

    for cert in models.Cert.objects.filter(timestamp__lt=ts):
        if debug:
            sys.stderr.write('removing %s\n' % cert.uri)
        trydelete(cert.asns.all())
        trydelete(cert.addresses.all())
        cert.delete()

    for gbr in models.Ghostbuster.objects.filter(timestamp__lt=ts):
        if debug:
            sys.stderr.write('removing %s\n' % gbr.uri)
        gbr.delete()

def process_cache(root, xml_file):
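    """
    Parse the rcynic XML output file, dispatch each object to the handler
    for its type, and garbage collect db entries for objects no longer
    present in the cache.
    """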
    start = time.time()

    # the timestamp from the first element in the rcynic XML file is saved
    # to perform garbage collection of stale db entries
    ts = 0

    dispatch = {
      'rcynic_certificate': process_rescert,
      'rcynic_roa'        : process_roa,
      'rcynic_ghostbuster': process_ghostbuster
    }

    # loop over all rcynic objects and dispatch based on the returned
    # rcynic_object subclass
    for obj in rcynic_xml_iterator(root, xml_file):
        r = dispatch[obj.__class__.__name__](obj)
        if not ts:
            ts = r.timestamp
    garbage_collect(ts)

    if debug:
        stop = time.time()
        sys.stdout.write('elapsed time %d seconds.\n' % (stop - start))

def process_labels(xml_file):
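    """
    Update the ValidationStatus table from the label definitions in the
    rcynic XML output file.
    """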
    if debug:
        sys.stderr.write('updating labels...\n')

    transaction.enter_transaction_management()
    transaction.managed()
    kinds = { 'good': 0, 'warn': 1, 'bad': 2 }
    for label, kind, desc in label_iterator(xml_file):
        if debug:
            sys.stderr.write('label=%s kind=%s desc=%s\n' % (label, kind, desc))
        if kind:
            q = models.ValidationStatus.objects.filter(label=label)
            if not q:
                obj = models.ValidationStatus(label=label)
            else:
                obj = q[0]

            obj.kind = kinds[kind]
            obj.status = desc
            obj.save()
    transaction.commit()
    transaction.leave_transaction_management()

if __name__ == '__main__':
    import optparse

    parser = optparse.OptionParser()
    parser.add_option("-d", "--debug", action="store_true",
            help="enable debugging message")
    parser.add_option("-f", "--file", dest="logfile",
            help="specify the rcynic XML file to parse [default: %default]",
            default=default_logfile)
    parser.add_option("-r", "--root",
            help="specify the chroot directory for the rcynic jail [default: %default]",
            metavar="DIR", default=default_root)
    options, args = parser.parse_args()
    if options.debug:
        debug = True

    process_labels(options.logfile)
    process_cache(options.root, options.logfile)

# vim:sw=4 ts=8