Diffstat (limited to 'rpki')
132 files changed, 28997 insertions, 0 deletions
diff --git a/rpki/POW/__init__.py b/rpki/POW/__init__.py new file mode 100644 index 00000000..d3796245 --- /dev/null +++ b/rpki/POW/__init__.py @@ -0,0 +1,7 @@ +from _POW import * + +# Set callback to let POW construct rpki.sundial.datetime objects + +from rpki.sundial import datetime as sundial_datetime +customDatetime(sundial_datetime) +del sundial_datetime diff --git a/rpki/__init__.py b/rpki/__init__.py new file mode 100644 index 00000000..9e090f63 --- /dev/null +++ b/rpki/__init__.py @@ -0,0 +1,2 @@ +# This file exists to tell Python that this the content of this +# directory constitute a Python package. diff --git a/rpki/adns.py b/rpki/adns.py new file mode 100644 index 00000000..a6a900ed --- /dev/null +++ b/rpki/adns.py @@ -0,0 +1,368 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2003--2007, 2009, 2010 Nominum, Inc. ("NOMINUM") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND NOMINUM DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR NOMINUM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Basic asynchronous DNS code, using asyncore and Bob Halley's excellent +dnspython package. +""" + +import asyncore +import socket +import time +import sys +import rpki.async +import rpki.sundial +import rpki.log + +try: + import dns.resolver, dns.rdatatype, dns.rdataclass, dns.name, dns.message + import dns.inet, dns.exception, dns.query, dns.rcode, dns.ipv4, dns.ipv6 +except ImportError: + if __name__ == "__main__": + sys.stderr.write("DNSPython not available, skipping rpki.adns unit test\n") + sys.exit(0) + else: + raise + +## @var resolver +# Resolver object, shared by everything using this module + +resolver = dns.resolver.Resolver() +if resolver.cache is None: + resolver.cache = dns.resolver.Cache() + +## @var nameservers +# Nameservers from resolver.nameservers converted to (af, address) +# pairs. The latter turns out to be a more useful form for us to use +# internally, because it simplifies the checks we need to make upon +# packet receiption. + +nameservers = [] + +for ns in resolver.nameservers: + try: + nameservers.append((socket.AF_INET, dns.ipv4.inet_aton(ns))) + continue + except Exception: + pass + try: + nameservers.append((socket.AF_INET6, dns.ipv6.inet_aton(ns))) + continue + except Exception: + pass + rpki.log.error("Couldn't parse nameserver address %r" % ns) + +class dispatcher(asyncore.dispatcher): + """ + Basic UDP socket reader for use with asyncore. + """ + + def __init__(self, cb, eb, af, bufsize = 65535): + asyncore.dispatcher.__init__(self) + self.cb = cb + self.eb = eb + self.af = af + self.bufsize = bufsize + self.create_socket(af, socket.SOCK_DGRAM) + + def handle_read(self): + """ + Receive a packet, hand it off to query class callback. 
+ """ + wire, from_address = self.recvfrom(self.bufsize) + self.cb(self.af, from_address[0], from_address[1], wire) + + def handle_error(self): + """ + Pass errors to query class errback. + """ + self.eb(sys.exc_info()[1]) + + def handle_connect(self): + """ + Quietly ignore UDP "connection" events. + """ + pass + + def writable(self): + """ + We don't need to hear about UDP socket becoming writable. + """ + return False + + +class query(object): + """ + Simplified (no search paths) asynchronous adaptation of + dns.resolver.Resolver.query() (q.v.). + """ + + def __init__(self, cb, eb, qname, qtype = dns.rdatatype.A, qclass = dns.rdataclass.IN): + if isinstance(qname, (str, unicode)): + qname = dns.name.from_text(qname) + if isinstance(qtype, str): + qtype = dns.rdatatype.from_text(qtype) + if isinstance(qclass, str): + qclass = dns.rdataclass.from_text(qclass) + assert qname.is_absolute() + self.cb = cb + self.eb = eb + self.qname = qname + self.qtype = qtype + self.qclass = qclass + self.start = time.time() + rpki.async.event_defer(self.go) + + def go(self): + """ + Start running the query. Check our cache before doing network + query; if we find an answer there, just return it. Otherwise + start the network query. + """ + if resolver.cache: + answer = resolver.cache.get((self.qname, self.qtype, self.qclass)) + else: + answer = None + if answer: + self.cb(self, answer) + else: + self.timer = rpki.async.timer() + self.sockets = {} + self.request = dns.message.make_query(self.qname, self.qtype, self.qclass) + if resolver.keyname is not None: + self.request.use_tsig(resolver.keyring, resolver.keyname, resolver.keyalgorithm) + self.request.use_edns(resolver.edns, resolver.ednsflags, resolver.payload) + self.response = None + self.backoff = 0.10 + self.nameservers = nameservers[:] + self.loop1() + + def loop1(self): + """ + Outer loop. If we haven't got a response yet and still have + nameservers to check, start inner loop. Otherwise, we're done. + """ + self.timer.cancel() + if self.response is None and self.nameservers: + self.iterator = rpki.async.iterator(self.nameservers[:], self.loop2, self.done2) + else: + self.done1() + + def loop2(self, iterator, nameserver): + """ + Inner loop. Send query to next nameserver in our list, unless + we've hit the overall timeout for this query. + """ + self.timer.cancel() + try: + timeout = resolver._compute_timeout(self.start) + except dns.resolver.Timeout, e: + self.lose(e) + else: + af, addr = nameserver + if af not in self.sockets: + self.sockets[af] = dispatcher(self.socket_cb, self.socket_eb, af) + self.sockets[af].sendto(self.request.to_wire(), + (dns.inet.inet_ntop(af, addr), resolver.port)) + self.timer.set_handler(self.socket_timeout) + self.timer.set_errback(self.socket_eb) + self.timer.set(rpki.sundial.timedelta(seconds = timeout)) + + def socket_timeout(self): + """ + No answer from nameserver, move on to next one (inner loop). + """ + self.response = None + self.iterator() + + def socket_eb(self, e): + """ + UDP socket signaled error. If it really is some kind of socket + error, handle as if we've timed out on this nameserver; otherwise, + pass error back to caller. + """ + self.timer.cancel() + if isinstance(e, socket.error): + self.response = None + self.iterator() + else: + self.lose(e) + + def socket_cb(self, af, from_host, from_port, wire): + """ + Received a packet that might be a DNS message. If it doesn't look + like it came from one of our nameservers, just drop it and leave + the timer running. 
Otherwise, try parsing it: if it's an answer, + we're done, otherwise handle error appropriately and move on to + next nameserver. + """ + sender = (af, dns.inet.inet_pton(af, from_host)) + if from_port != resolver.port or sender not in self.nameservers: + return + self.timer.cancel() + try: + self.response = dns.message.from_wire(wire, keyring = self.request.keyring, request_mac = self.request.mac, one_rr_per_rrset = False) + except dns.exception.FormError: + self.nameservers.remove(sender) + else: + rcode = self.response.rcode() + if rcode in (dns.rcode.NOERROR, dns.rcode.NXDOMAIN): + self.done1() + return + if rcode != dns.rcode.SERVFAIL: + self.nameservers.remove(sender) + self.response = None + self.iterator() + + def done2(self): + """ + Done with inner loop. If we still haven't got an answer and + haven't (yet?) eliminated all of our nameservers, wait a little + while before starting the cycle again, unless we've hit the + timeout threshold for the whole query. + """ + if self.response is None and self.nameservers: + try: + delay = rpki.sundial.timedelta(seconds = min(resolver._compute_timeout(self.start), self.backoff)) + self.backoff *= 2 + self.timer.set_handler(self.loop1) + self.timer.set_errback(self.lose) + self.timer.set(delay) + except dns.resolver.Timeout, e: + self.lose(e) + else: + self.loop1() + + def cleanup(self): + """ + Shut down our timer and sockets. + """ + self.timer.cancel() + for s in self.sockets.itervalues(): + s.close() + + def lose(self, e): + """ + Something bad happened. Clean up, then pass error back to caller. + """ + self.cleanup() + self.eb(self, e) + + def done1(self): + """ + Done with outer loop. If we got a useful answer, cache it, then + pass it back to caller; if we got an error, pass the appropriate + exception back to caller. 
+ """ + self.cleanup() + try: + if not self.nameservers: + raise dns.resolver.NoNameservers + if self.response.rcode() == dns.rcode.NXDOMAIN: + raise dns.resolver.NXDOMAIN + answer = dns.resolver.Answer(self.qname, self.qtype, self.qclass, self.response) + if resolver.cache: + resolver.cache.put((self.qname, self.qtype, self.qclass), answer) + self.cb(self, answer) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + self.lose(e) + +class getaddrinfo(object): + + typemap = { dns.rdatatype.A : socket.AF_INET, + dns.rdatatype.AAAA : socket.AF_INET6 } + + def __init__(self, cb, eb, host, address_families = typemap.values()): + self.cb = cb + self.eb = eb + self.host = host + self.result = [] + self.queries = [query(self.done, self.lose, host, qtype) + for qtype in self.typemap + if self.typemap[qtype] in address_families] + + def done(self, q, answer): + if answer is not None: + for a in answer: + self.result.append((self.typemap[a.rdtype], a.address)) + self.queries.remove(q) + if not self.queries: + self.cb(self.result) + + def lose(self, q, e): + if isinstance(e, dns.resolver.NoAnswer): + self.done(q, None) + else: + for q in self.queries: + q.cleanup() + self.eb(e) + +if __name__ == "__main__": + + rpki.log.init("test-adns", use_syslog = False) + print "Some adns tests may take a minute or two, please be patient" + + class test_getaddrinfo(object): + + def __init__(self, qname): + self.qname = qname + getaddrinfo(self.done, self.lose, qname) + + def done(self, result): + print "getaddrinfo(%s) returned: %s" % ( + self.qname, + ", ".join(str(r) for r in result)) + + def lose(self, e): + print "getaddrinfo(%s) failed: %r" % (self.qname, e) + + class test_query(object): + + def __init__(self, qname, qtype = dns.rdatatype.A, qclass = dns.rdataclass.IN): + self.qname = qname + self.qtype = qtype + self.qclass = qclass + query(self.done, self.lose, qname, qtype = qtype, qclass = qclass) + + def done(self, q, result): + print "query(%s, %s, %s) returned: %s" % ( + self.qname, + dns.rdatatype.to_text(self.qtype), + dns.rdataclass.to_text(self.qclass), + ", ".join(str(r) for r in result)) + + def lose(self, q, e): + print "getaddrinfo(%s, %s, %s) failed: %r" % ( + self.qname, + dns.rdatatype.to_text(self.qtype), + dns.rdataclass.to_text(self.qclass), + e) + + if True: + for t in (dns.rdatatype.A, dns.rdatatype.AAAA, dns.rdatatype.HINFO): + test_query("subvert-rpki.hactrn.net", t) + test_query("nonexistant.rpki.net") + test_query("subvert-rpki.hactrn.net", qclass = dns.rdataclass.CH) + + for h in ("subvert-rpki.hactrn.net", "nonexistant.rpki.net"): + test_getaddrinfo(h) + + rpki.async.event_loop() diff --git a/rpki/async.py b/rpki/async.py new file mode 100644 index 00000000..49f98841 --- /dev/null +++ b/rpki/async.py @@ -0,0 +1,420 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Utilities for event-driven programming. +""" + +import asyncore +import signal +import traceback +import gc +import sys +import rpki.log +import rpki.sundial + +ExitNow = asyncore.ExitNow + +class iterator(object): + """ + Iteration construct for event-driven code. Takes three + arguments: + + - Some kind of iterable object + + - A callback to call on each item in the iteration + + - A callback to call after the iteration terminates. + + The item callback receives two arguments: the callable iterator + object and the current value of the iteration. It should call the + iterator (or arrange for the iterator to be called) when it is time + to continue to the next item in the iteration. + + The termination callback receives no arguments. + + Special case for memory constrained cases: if keyword argument + pop_list is True, iterable must be a list, which is modified in + place, popping items off of it until it's empty. + """ + + def __init__(self, iterable, item_callback, done_callback, unwind_stack = True, pop_list = False): + assert not pop_list or isinstance(iterable, list), "iterable must be a list when using pop_list" + self.item_callback = item_callback + self.done_callback = done_callback if done_callback is not None else lambda: None + self.caller_file, self.caller_line, self.caller_function = traceback.extract_stack(limit = 2)[0][0:3] + self.unwind_stack = unwind_stack + self.pop_list = pop_list + try: + if self.pop_list: + self.iterator = iterable + else: + self.iterator = iter(iterable) + except (ExitNow, SystemExit): + raise + except Exception: + rpki.log.debug("Problem constructing iterator for %r" % (iterable,)) + raise + self.doit() + + def __repr__(self): + return rpki.log.log_repr(self, + "created at %s:%s" % (self.caller_file, + self.caller_line), + self.caller_function) + + def __call__(self): + if self.unwind_stack: + event_defer(self.doit) + else: + self.doit() + + def doit(self): + """ + Implement the iterator protocol: attempt to call the item handler + with the next iteration value, call the termination handler if the + iterator signaled StopIteration. + """ + + try: + if self.pop_list: + val = self.iterator.pop(0) + else: + val = self.iterator.next() + except (IndexError, StopIteration): + self.done_callback() + else: + self.item_callback(self, val) + +## @var timer_queue +# Timer queue. + +timer_queue = [] + +class timer(object): + """ + Timer construct for event-driven code. + """ + + ## @var gc_debug + # Verbose chatter about timers states and garbage collection. + gc_debug = False + + ## @var run_debug + # Verbose chatter about timers being run. + run_debug = False + + def __init__(self, handler = None, errback = None): + self.set_handler(handler) + self.set_errback(errback) + self.when = None + if self.gc_debug: + self.trace("Creating %r" % self) + + def trace(self, msg): + """ + Debug logging. + """ + if self.gc_debug: + bt = traceback.extract_stack(limit = 3) + rpki.log.debug("%s from %s:%d" % (msg, bt[0][0], bt[0][1])) + + def set(self, when): + """ + Set a timer. Argument can be a datetime, to specify an absolute + time, or a timedelta, to specify an offset time. 
+ """ + if self.gc_debug: + self.trace("Setting %r to %r" % (self, when)) + if isinstance(when, rpki.sundial.timedelta): + self.when = rpki.sundial.now() + when + else: + self.when = when + assert isinstance(self.when, rpki.sundial.datetime), "%r: Expecting a datetime, got %r" % (self, self.when) + if self not in timer_queue: + timer_queue.append(self) + timer_queue.sort(key = lambda x: x.when) + + def __cmp__(self, other): + return cmp(id(self), id(other)) + + if gc_debug: + def __del__(self): + rpki.log.debug("Deleting %r" % self) + + def cancel(self): + """ + Cancel a timer, if it was set. + """ + if self.gc_debug: + self.trace("Canceling %r" % self) + try: + while True: + timer_queue.remove(self) + except ValueError: + pass + + def is_set(self): + """ + Test whether this timer is currently set. + """ + return self in timer_queue + + def set_handler(self, handler): + """ + Set timer's expiration handler. This is an alternative to + subclassing the timer class, and may be easier to use when + integrating timers into other classes (eg, the handler can be a + bound method to an object in a class representing a network + connection). + """ + self.handler = handler + + def set_errback(self, errback): + """ + Set a timer's errback. Like set_handler(), for errbacks. + """ + self.errback = errback + + @classmethod + def runq(cls): + """ + Run the timer queue: for each timer whose call time has passed, + pull the timer off the queue and call its handler() method. + + Comparisions are made against time at which this function was + called, so that even if new events keep getting scheduled, we'll + return to the I/O loop reasonably quickly. + """ + now = rpki.sundial.now() + while timer_queue and now >= timer_queue[0].when: + t = timer_queue.pop(0) + if cls.run_debug: + rpki.log.debug("Running %r" % t) + try: + if t.handler is not None: + t.handler() + else: + rpki.log.warn("Timer %r expired with no handler set" % t) + except (ExitNow, SystemExit): + raise + except Exception, e: + if t.errback is not None: + t.errback(e) + else: + rpki.log.error("Unhandled exception from timer %r: %s" % (t, e)) + rpki.log.traceback() + + def __repr__(self): + return rpki.log.log_repr(self, self.when, repr(self.handler)) + + @classmethod + def seconds_until_wakeup(cls): + """ + Calculate delay until next timer expires, or None if no timers are + set and we should wait indefinitely. Rounds up to avoid spinning + in select() or poll(). We could calculate fractional seconds in + the right units instead, but select() and poll() don't even take + the same units (argh!), and we're not doing anything that + hair-triggered, so rounding up is simplest. + """ + if not timer_queue: + return None + now = rpki.sundial.now() + if now >= timer_queue[0].when: + return 0 + delay = timer_queue[0].when - now + seconds = delay.convert_to_seconds() + if delay.microseconds: + seconds += 1 + return seconds + + @classmethod + def clear(cls): + """ + Cancel every timer on the queue. We could just throw away the + queue content, but this way we can notify subclasses that provide + their own cancel() method. + """ + while timer_queue: + timer_queue.pop(0).cancel() + +def _raiseExitNow(signum, frame): + """ + Signal handler for event_loop(). + """ + raise ExitNow + +def exit_event_loop(): + """ + Force exit from event_loop(). 
+ """ + raise ExitNow + +def event_defer(handler, delay = rpki.sundial.timedelta(seconds = 0)): + """ + Use a near-term (default: zero interval) timer to schedule an event + to run after letting the I/O system have a turn. + """ + timer(handler).set(delay) + +## @var debug_event_timing +# Enable insanely verbose logging of event timing + +debug_event_timing = False + +def event_loop(catch_signals = (signal.SIGINT, signal.SIGTERM)): + """ + Replacement for asyncore.loop(), adding timer and signal support. + """ + old_signal_handlers = {} + while True: + save_sigs = len(old_signal_handlers) == 0 + try: + for sig in catch_signals: + old = signal.signal(sig, _raiseExitNow) + if save_sigs: + old_signal_handlers[sig] = old + while asyncore.socket_map or timer_queue: + t = timer.seconds_until_wakeup() + if debug_event_timing: + rpki.log.debug("Dismissing to asyncore.poll(), t = %s, q = %r" % (t, timer_queue)) + asyncore.poll(t, asyncore.socket_map) + timer.runq() + if timer.gc_debug: + gc.collect() + if gc.garbage: + for i in gc.garbage: + rpki.log.debug("GC-cycle %r" % i) + del gc.garbage[:] + except ExitNow: + break + except SystemExit: + raise + except ValueError, e: + if str(e) == "filedescriptor out of range in select()": + rpki.log.error("Something is badly wrong, select() thinks we gave it a bad file descriptor.") + rpki.log.error("Content of asyncore.socket_map:") + for fd in sorted(asyncore.socket_map.iterkeys()): + rpki.log.error(" fd %s obj %r" % (fd, asyncore.socket_map[fd])) + rpki.log.error("Not safe to continue due to risk of spin loop on select(). Exiting.") + sys.exit(1) + rpki.log.error("event_loop() exited with exception %r, this is not supposed to happen, restarting" % e) + except Exception, e: + rpki.log.error("event_loop() exited with exception %r, this is not supposed to happen, restarting" % e) + else: + break + finally: + for sig in old_signal_handlers: + signal.signal(sig, old_signal_handlers[sig]) + +class sync_wrapper(object): + """ + Synchronous wrapper around asynchronous functions. Running in + asynchronous mode at all times makes sense for event-driven daemons, + but is kind of tedious for simple scripts, hence this wrapper. + + The wrapped function should take at least two arguments: a callback + function and an errback function. If any arguments are passed to + the wrapper, they will be passed as additional arguments to the + wrapped function. + """ + + res = None + err = None + + def __init__(self, func): + self.func = func + + def cb(self, res = None): + """ + Wrapped code has requested normal termination. Store result, and + exit the event loop. + """ + self.res = res + raise ExitNow + + def eb(self, err): + """ + Wrapped code raised an exception. Store exception data, then exit + the event loop. + """ + exc_info = sys.exc_info() + self.err = exc_info if exc_info[1] is err else err + raise ExitNow + + def __call__(self, *args, **kwargs): + + def thunk(): + try: + self.func(self.cb, self.eb, *args, **kwargs) + except ExitNow: + raise + except Exception, e: + self.eb(e) + + event_defer(thunk) + event_loop() + if self.err is None: + return self.res + elif isinstance(self.err, tuple): + raise self.err[0], self.err[1], self.err[2] + else: + raise self.err + +class gc_summary(object): + """ + Periodic summary of GC state, for tracking down memory bloat. 
+ """ + + def __init__(self, interval, threshold = 0): + if isinstance(interval, (int, long)): + interval = rpki.sundial.timedelta(seconds = interval) + self.interval = interval + self.threshold = threshold + self.timer = timer(handler = self.handler) + self.timer.set(self.interval) + + def handler(self): + """ + Collect and log GC state for this period, reset timer. + """ + rpki.log.debug("gc_summary: Running gc.collect()") + gc.collect() + rpki.log.debug("gc_summary: Summarizing (threshold %d)" % self.threshold) + total = {} + tuples = {} + for g in gc.get_objects(): + k = type(g).__name__ + total[k] = total.get(k, 0) + 1 + if isinstance(g, tuple): + k = ", ".join(type(x).__name__ for x in g) + tuples[k] = tuples.get(k, 0) + 1 + rpki.log.debug("gc_summary: Sorting result") + total = total.items() + total.sort(reverse = True, key = lambda x: x[1]) + tuples = tuples.items() + tuples.sort(reverse = True, key = lambda x: x[1]) + rpki.log.debug("gc_summary: Object type counts in descending order") + for name, count in total: + if count > self.threshold: + rpki.log.debug("gc_summary: %8d %s" % (count, name)) + rpki.log.debug("gc_summary: Tuple content type signature counts in descending order") + for types, count in tuples: + if count > self.threshold: + rpki.log.debug("gc_summary: %8d (%s)" % (count, types)) + rpki.log.debug("gc_summary: Scheduling next cycle") + self.timer.set(self.interval) diff --git a/rpki/cli.py b/rpki/cli.py new file mode 100644 index 00000000..1930f2b7 --- /dev/null +++ b/rpki/cli.py @@ -0,0 +1,277 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Utilities for writing command line tools. +""" + +import cmd +import glob +import shlex +import os.path +import argparse +import traceback + +try: + import readline + have_readline = True +except ImportError: + have_readline = False + +class BadCommandSyntax(Exception): + "Bad command line syntax." + +class ExitArgparse(Exception): + "Exit method from ArgumentParser." + + def __init__(self, message = None, status = 0): + self.message = message + self.status = status + +class Cmd(cmd.Cmd): + """ + Customized subclass of Python cmd module. + """ + + emptyline_repeats_last_command = False + + EOF_exits_command_loop = True + + identchars = cmd.IDENTCHARS + "/-." + + histfile = None + + last_command_failed = False + + def __init__(self, argv = None): + cmd.Cmd.__init__(self) + if argv: + self.onecmd(" ".join(argv)) + else: + self.cmdloop_with_history() + + def onecmd(self, line): + """ + Wrap error handling around cmd.Cmd.onecmd(). Might want to do + something kinder than showing a traceback, eventually. 
+ """ + + self.last_command_failed = False + try: + return cmd.Cmd.onecmd(self, line) + except SystemExit: + raise + except ExitArgparse, e: + if e.message is not None: + print e.message + self.last_command_failed = e.status != 0 + return False + except BadCommandSyntax, e: + print e + except Exception: + traceback.print_exc() + self.last_command_failed = True + return False + + def do_EOF(self, arg): + if self.EOF_exits_command_loop and self.prompt: + print + return self.EOF_exits_command_loop + + def do_exit(self, arg): + """ + Exit program. + """ + + return True + + do_quit = do_exit + + def emptyline(self): + """ + Handle an empty line. cmd module default is to repeat the last + command, which I find to be violation of the principal of least + astonishment, so my preference is that an empty line does nothing. + """ + + if self.emptyline_repeats_last_command: + cmd.Cmd.emptyline(self) + + def filename_complete(self, text, line, begidx, endidx): + """ + Filename completion handler, with hack to restore what I consider + the normal (bash-like) behavior when one hits the completion key + and there's only one match. + """ + + result = glob.glob(text + "*") + if len(result) == 1: + path = result.pop() + if os.path.isdir(path) or (os.path.islink(path) and os.path.isdir(os.path.join(path, "."))): + result.append(path + os.path.sep) + else: + result.append(path + " ") + return result + + def completenames(self, text, *ignored): + """ + Command name completion handler, with hack to restore what I + consider the normal (bash-like) behavior when one hits the + completion key and there's only one match. + """ + + result = cmd.Cmd.completenames(self, text, *ignored) + if len(result) == 1: + result[0] += " " + return result + + def help_help(self): + """ + Type "help [topic]" for help on a command, + or just "help" for a list of commands. + """ + + self.stdout.write(self.help_help.__doc__ + "\n") + + def complete_help(self, *args): + """ + Better completion function for help command arguments. + """ + + text = args[0] + names = self.get_names() + result = [] + for prefix in ("do_", "help_"): + result.extend(s[len(prefix):] for s in names if s.startswith(prefix + text) and s != "do_EOF") + return result + + if have_readline: + + def cmdloop_with_history(self): + """ + Better command loop, with history file and tweaked readline + completion delimiters. + """ + + old_completer_delims = readline.get_completer_delims() + if self.histfile is not None: + try: + readline.read_history_file(self.histfile) + except IOError: + pass + try: + readline.set_completer_delims("".join(set(old_completer_delims) - set(self.identchars))) + self.cmdloop() + finally: + if self.histfile is not None and readline.get_current_history_length(): + readline.write_history_file(self.histfile) + readline.set_completer_delims(old_completer_delims) + + else: + + cmdloop_with_history = cmd.Cmd.cmdloop + + + +def yes_or_no(prompt, default = None, require_full_word = False): + """ + Ask a yes-or-no question. 
+ """ + + prompt = prompt.rstrip() + _yes_or_no_prompts[default] + while True: + answer = raw_input(prompt).strip().lower() + if not answer and default is not None: + return default + if answer == "yes" or (not require_full_word and answer.startswith("y")): + return True + if answer == "no" or (not require_full_word and answer.startswith("n")): + return False + print 'Please answer "yes" or "no"' + +_yes_or_no_prompts = { + True : ' ("yes" or "no" ["yes"]) ', + False : ' ("yes" or "no" ["no"]) ', + None : ' ("yes" or "no") ' } + + +class NonExitingArgumentParser(argparse.ArgumentParser): + """ + ArgumentParser tweaked to throw ExitArgparse exception + rather than using sys.exit(), for use with command loop. + """ + + def exit(self, status = 0, message = None): + raise ExitArgparse(status = status, message = message) + + +def parsecmd(subparsers, *arg_clauses): + """ + Decorator to combine the argparse and cmd modules. + + subparsers is an instance of argparse.ArgumentParser (or subclass) which was + returned by calling the .add_subparsers() method on an ArgumentParser instance + intended to handle parsing for the entire program on the command line. + + arg_clauses is a series of defarg() invocations defining arguments to be parsed + by the argparse code. + + The decorator will use arg_clauses to construct two separate argparse parser + instances: one will be attached to the global parser as a subparser, the + other will be used to parse arguments for this command when invoked by cmd. + + The decorator will replace the original do_whatever method with a wrapped version + which uses the local argparse instance to parse the single string supplied by + the cmd module. + + The intent is that, from the command's point of view, all of this should work + pretty much the same way regardless of whether the command was invoked from + the global command line or from within the cmd command loop. Either way, + the command method should get an argparse.Namespace object. + + In theory, we could generate a completion handler from the argparse definitions, + much as the separate argcomplete package does. In practice this is a lot of + work and I'm not ready to get into that just yet. + """ + + def decorate(func): + assert func.__name__.startswith("do_") + parser = NonExitingArgumentParser(description = func.__doc__, + prog = func.__name__[3:], + add_help = False) + subparser = subparsers.add_parser(func.__name__[3:], + description = func.__doc__, + help = func.__doc__.lstrip().partition("\n")[0]) + for positional, keywords in arg_clauses: + parser.add_argument(*positional, **keywords) + subparser.add_argument(*positional, **keywords) + subparser.set_defaults(func = func) + def wrapped(self, arg): + return func(self, parser.parse_args(shlex.split(arg))) + wrapped.argparser = parser + wrapped.__doc__ = func.__doc__ + return wrapped + return decorate + +def cmdarg(*positional, **keywords): + """ + Syntactic sugar to let us use keyword arguments normally when constructing + arguments for deferred calls to argparse.ArgumentParser.add_argument(). 
+ """ + + return positional, keywords diff --git a/rpki/config.py b/rpki/config.py new file mode 100644 index 00000000..9f26664e --- /dev/null +++ b/rpki/config.py @@ -0,0 +1,301 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Configuration file parsing utilities, layered on top of stock Python +ConfigParser module. +""" + +import ConfigParser +import os +import re + +## @var default_filename +# Default name of config file if caller doesn't specify one explictly. + +default_filename = "rpki.conf" + +## @var default_dirname +# Default name of directory to check for global config file, or None +# if no global config file. Autoconf-generated code may set this to a +# non-None value during script startup. + +try: + import rpki.autoconf + default_dirname = rpki.autoconf.sysconfdir +except ImportError: + default_dirname = None + +## @var default_envname +# Name of environment variable containing config file name. + +default_envname = "RPKI_CONF" + +class parser(object): + """ + Extensions to stock Python ConfigParser: + + Read config file and set default section while initializing parser object. + + Support for OpenSSL-style subscripted options and a limited form of + OpenSSL-style indirect variable references (${section::option}). + + get-methods with default values and default section name. + + If no filename is given to the constructor (filename = None), we + check for an environment variable naming the config file, then we + check for a default filename in the current directory, then finally + we check for a global config file if autoconf provided a directory + name to check. + """ + + def __init__(self, filename = None, section = None, allow_missing = False): + + self.cfg = ConfigParser.RawConfigParser() + self.default_section = section + + filenames = [] + if filename is not None: + filenames.append(filename) + else: + if default_envname in os.environ: + filenames.append(os.environ[default_envname]) + filenames.append(default_filename) + if default_dirname is not None: + filenames.append("%s/%s" % (default_dirname, default_filename)) + + f = fn = None + + for fn in filenames: + try: + f = open(fn) + break + except IOError: + f = None + + if f is not None: + self.filename = fn + self.cfg.readfp(f, fn) + elif allow_missing: + self.filename = None + else: + raise + + def has_section(self, section): + """ + Test whether a section exists. + """ + + return self.cfg.has_section(section) + + def has_option(self, option, section = None): + """ + Test whether an option exists. 
+ """ + + if section is None: + section = self.default_section + return self.cfg.has_option(section, option) + + def multiget(self, option, section = None): + """ + Parse OpenSSL-style foo.0, foo.1, ... subscripted options. + + Returns a list of values matching the specified option name. + """ + + matches = [] + if section is None: + section = self.default_section + if self.cfg.has_option(section, option): + matches.append((-1, self.get(option, section = section))) + for key in self.cfg.options(section): + s = key.rsplit(".", 1) + if len(s) == 2 and s[0] == option and s[1].isdigit(): + matches.append((int(s[1]), self.get(option, section = section))) + matches.sort() + return [match[1] for match in matches] + + _regexp = re.compile("\\${(.*?)::(.*?)}") + + def _repl(self, m): + """ + Replacement function for indirect variable substitution. + This is intended for use with re.subn(). + """ + section, option = m.group(1, 2) + if section == "ENV": + return os.getenv(option, "") + else: + return self.cfg.get(section, option) + + def get(self, option, default = None, section = None): + """ + Get an option, perhaps with a default value. + """ + if section is None: + section = self.default_section + if default is not None and not self.cfg.has_option(section, option): + return default + val = self.cfg.get(section, option) + while True: + val, modified = self._regexp.subn(self._repl, val, 1) + if not modified: + return val + + def getboolean(self, option, default = None, section = None): + """ + Get a boolean option, perhaps with a default value. + """ + v = self.get(option, default, section) + if isinstance(v, str): + v = v.lower() + if v not in self.cfg._boolean_states: + raise ValueError, "Not a boolean: %s" % v + v = self.cfg._boolean_states[v] + return v + + def getint(self, option, default = None, section = None): + """ + Get an integer option, perhaps with a default value. + """ + return int(self.get(option, default, section)) + + def getlong(self, option, default = None, section = None): + """ + Get a long integer option, perhaps with a default value. + """ + return long(self.get(option, default, section)) + + def set_global_flags(self): + """ + Consolidated control for all the little global control flags + scattered through the libraries. This isn't a particularly good + place for this function to live, but it has to live somewhere and + making it a method of the config parser from which it gets all of + its data is less silly than the available alternatives. 
+ """ + + import rpki.http + import rpki.x509 + import rpki.sql + import rpki.async + import rpki.log + import rpki.daemonize + + try: + rpki.http.debug_http = self.getboolean("debug_http") + except ConfigParser.NoOptionError: + pass + + try: + rpki.http.want_persistent_client = self.getboolean("want_persistent_client") + except ConfigParser.NoOptionError: + pass + + try: + rpki.http.want_persistent_server = self.getboolean("want_persistent_server") + except ConfigParser.NoOptionError: + pass + + try: + rpki.http.use_adns = self.getboolean("use_adns") + except ConfigParser.NoOptionError: + pass + + try: + rpki.http.enable_ipv6_clients = self.getboolean("enable_ipv6_clients") + except ConfigParser.NoOptionError: + pass + + try: + rpki.http.enable_ipv6_servers = self.getboolean("enable_ipv6_servers") + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.CMS_object.debug_cms_certs = self.getboolean("debug_cms_certs") + except ConfigParser.NoOptionError: + pass + + try: + rpki.sql.sql_persistent.sql_debug = self.getboolean("sql_debug") + except ConfigParser.NoOptionError: + pass + + try: + rpki.async.timer.gc_debug = self.getboolean("gc_debug") + except ConfigParser.NoOptionError: + pass + + try: + rpki.async.timer.run_debug = self.getboolean("timer_debug") + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.XML_CMS_object.dump_outbound_cms = rpki.x509.DeadDrop(self.get("dump_outbound_cms")) + except OSError, e: + rpki.log.warn("Couldn't initialize mailbox %s: %s" % (self.get("dump_outbound_cms"), e)) + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.XML_CMS_object.dump_inbound_cms = rpki.x509.DeadDrop(self.get("dump_inbound_cms")) + except OSError, e: + rpki.log.warn("Couldn't initialize mailbox %s: %s" % (self.get("dump_inbound_cms"), e)) + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.XML_CMS_object.check_inbound_schema = self.getboolean("check_inbound_schema") + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.XML_CMS_object.check_outbound_schema = self.getboolean("check_outbound_schema") + except ConfigParser.NoOptionError: + pass + + try: + rpki.async.gc_summary(self.getint("gc_summary"), self.getint("gc_summary_threshold", 0)) + except ConfigParser.NoOptionError: + pass + + try: + rpki.log.enable_tracebacks = self.getboolean("enable_tracebacks") + except ConfigParser.NoOptionError: + pass + + try: + rpki.daemonize.default_pid_directory = self.get("pid_directory") + except ConfigParser.NoOptionError: + pass + + try: + rpki.daemonize.pid_filename = self.get("pid_filename") + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.generate_insecure_debug_only_rsa_key = rpki.x509.insecure_debug_only_rsa_key_generator(*self.get("insecure-debug-only-rsa-key-db").split()) + except ConfigParser.NoOptionError: + pass + except: + rpki.log.warn("insecure-debug-only-rsa-key-db configured but initialization failed, check for corrupted database file") diff --git a/rpki/csv_utils.py b/rpki/csv_utils.py new file mode 100644 index 00000000..47caabdb --- /dev/null +++ b/rpki/csv_utils.py @@ -0,0 +1,112 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +CSV utilities, moved here from myrpki.py. +""" + +import csv +import os + +class BadCSVSyntax(Exception): + """ + Bad CSV syntax. + """ + +class csv_reader(object): + """ + Reader for tab-delimited text that's (slightly) friendlier than the + stock Python csv module (which isn't intended for direct use by + humans anyway, and neither was this package originally, but that + seems to be the way that it has evolved...). + + Columns parameter specifies how many columns users of the reader + expect to see; lines with fewer columns will be padded with None + values. + + Original API design for this class courtesy of Warren Kumari, but + don't blame him if you don't like what I did with his ideas. + """ + + def __init__(self, filename, columns = None, min_columns = None, comment_characters = "#;"): + assert columns is None or isinstance(columns, int) + assert min_columns is None or isinstance(min_columns, int) + if columns is not None and min_columns is None: + min_columns = columns + self.filename = filename + self.columns = columns + self.min_columns = min_columns + self.comment_characters = comment_characters + self.file = open(filename, "r") + + def __iter__(self): + line_number = 0 + for line in self.file: + line_number += 1 + line = line.strip() + if not line or line[0] in self.comment_characters: + continue + fields = line.split() + if self.min_columns is not None and len(fields) < self.min_columns: + raise BadCSVSyntax, "%s:%d: Not enough columns in line %r" % (self.filename, line_number, line) + if self.columns is not None and len(fields) > self.columns: + raise BadCSVSyntax, "%s:%d: Too many columns in line %r" % (self.filename, line_number, line) + if self.columns is not None and len(fields) < self.columns: + fields += tuple(None for i in xrange(self.columns - len(fields))) + yield fields + + def __enter__(self): + return self + + def __exit__(self, _type, value, traceback): + self.file.close() + +class csv_writer(object): + """ + Writer object for tab delimited text. We just use the stock CSV + module in excel-tab mode for this. + + If "renmwo" is set (default), the file will be written to + a temporary name and renamed to the real filename after closing. + """ + + def __init__(self, filename, renmwo = True): + self.filename = filename + self.renmwo = "%s.~renmwo%d~" % (filename, os.getpid()) if renmwo else filename + self.file = open(self.renmwo, "w") + self.writer = csv.writer(self.file, dialect = csv.get_dialect("excel-tab")) + + def __enter__(self): + return self + + def __exit__(self, _type, value, traceback): + self.close() + + def close(self): + """ + Close this writer. + """ + if self.file is not None: + self.file.close() + self.file = None + if self.filename != self.renmwo: + os.rename(self.renmwo, self.filename) + + def __getattr__(self, attr): + """ + Fake inheritance from whatever object csv.writer deigns to give us. 
+ """ + return getattr(self.writer, attr) diff --git a/rpki/daemonize.py b/rpki/daemonize.py new file mode 100644 index 00000000..62b4ee4e --- /dev/null +++ b/rpki/daemonize.py @@ -0,0 +1,133 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Some code borrowed from +# http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/ +# +# (which was explicitly placed in public domain by its author), and from +# +# /usr/src/lib/libc/gen/daemon.c +# +# (the libc implementation of daemon(3) on FreeBSD), so: +# +# Portions copyright (c) 1990, 1993 +# The Regents of the University of California. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# 4. Neither the name of the University nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +# SUCH DAMAGE. + +""" +Make a normal program into a "daemon", like the 4.4BSD daemon(3) call. + +This doesn't quite follow either the 4.4BSD call or the Python 3.x library, +because it was written to fit into an existing package and I didn't +want to drag in yet another external library just for this. +""" + +import sys +import os +import atexit +import signal +import rpki.log + +# Does default_pid_directory need to be autoconf-configurable? + +## @var default_pid_directory +# Default directory to which to write process ID files. + +default_pid_directory = "/var/run/rpki" + +## @var pid_filename +# Configurable filename to which to write process ID file. +# pidfile argument to daemon() overrides this. 
+ +pid_filename = None + +def daemon(nochdir = False, noclose = False, pidfile = None): + """ + Make this program become a daemon, like 4.4BSD daemon(3), and + write its pid out to a file with cleanup on exit. + """ + + if pidfile is None: + if pid_filename is None: + prog = os.path.splitext(os.path.basename(sys.argv[0]))[0] + pidfile = os.path.join(default_pid_directory, "%s.pid" % prog) + else: + pidfile = pid_filename + + old_sighup_action = signal.signal(signal.SIGHUP, signal.SIG_IGN) + + try: + pid = os.fork() + except OSError, e: + sys.exit("fork() failed: %d (%s)" % (e.errno, e.strerror)) + else: + if pid > 0: + os._exit(0) + + if not nochdir: + os.chdir("/") + + os.setsid() + + if not noclose: + sys.stdout.flush() + sys.stderr.flush() + fd = os.open(os.devnull, os.O_RDWR) + os.dup2(fd, 0) + os.dup2(fd, 1) + os.dup2(fd, 2) + if fd > 2: + os.close(fd) + + signal.signal(signal.SIGHUP, old_sighup_action) + + def delete_pid_file(): + try: + os.unlink(pidfile) + except OSError: + pass + + atexit.register(delete_pid_file) + + try: + f = open(pidfile, "w") + f.write("%d\n" % os.getpid()) + f.close() + except IOError, e: + rpki.log.warn("Couldn't write PID file %s: %s" % (pidfile, e.strerror)) diff --git a/rpki/exceptions.py b/rpki/exceptions.py new file mode 100644 index 00000000..d8d3774e --- /dev/null +++ b/rpki/exceptions.py @@ -0,0 +1,367 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Exception definitions for RPKI modules. +""" + +class RPKI_Exception(Exception): + """ + Base class for RPKI exceptions. + """ + +class NotInDatabase(RPKI_Exception): + """ + Lookup failed for an object expected to be in the database. + """ + +class BadURISyntax(RPKI_Exception): + """ + Illegal syntax for a URI. + """ + +class BadStatusCode(RPKI_Exception): + """ + Unrecognized protocol status code. + """ + +class BadQuery(RPKI_Exception): + """ + Unexpected protocol query. + """ + +class DBConsistancyError(RPKI_Exception): + """ + Found multiple matches for a database query that shouldn't ever + return that. + """ + +class CMSVerificationFailed(RPKI_Exception): + """ + Verification of a CMS message failed. + """ + +class HTTPRequestFailed(RPKI_Exception): + """ + HTTP request failed. + """ + +class DERObjectConversionError(RPKI_Exception): + """ + Error trying to convert a DER-based object from one representation + to another. + """ + +class NotACertificateChain(RPKI_Exception): + """ + Certificates don't form a proper chain. + """ + +class BadContactURL(RPKI_Exception): + """ + Error trying to parse contact URL. 
+ """ + +class BadClassNameSyntax(RPKI_Exception): + """ + Illegal syntax for a class_name. + """ + +class BadIssueResponse(RPKI_Exception): + """ + issue_response PDU with wrong number of classes or certificates. + """ + +class NotImplementedYet(RPKI_Exception): + """ + Internal error -- not implemented yet. + """ + +class BadPKCS10(RPKI_Exception): + """ + Bad PKCS #10 object. + """ + +class UpstreamError(RPKI_Exception): + """ + Received an error from upstream. + """ + +class ChildNotFound(RPKI_Exception): + """ + Could not find specified child in database. + """ + +class BSCNotFound(RPKI_Exception): + """ + Could not find specified BSC in database. + """ + +class BadSender(RPKI_Exception): + """ + Unexpected XML sender value. + """ + +class ClassNameMismatch(RPKI_Exception): + """ + class_name does not match child context. + """ + +class ClassNameUnknown(RPKI_Exception): + """ + Unknown class_name. + """ + +class SKIMismatch(RPKI_Exception): + """ + SKI value in response does not match request. + """ + +class SubprocessError(RPKI_Exception): + """ + Subprocess returned unexpected error. + """ + +class BadIRDBReply(RPKI_Exception): + """ + Unexpected reply to IRDB query. + """ + +class NotFound(RPKI_Exception): + """ + Object not found in database. + """ + +class MustBePrefix(RPKI_Exception): + """ + Resource range cannot be expressed as a prefix. + """ + +class TLSValidationError(RPKI_Exception): + """ + TLS certificate validation error. + """ + +class MultipleTLSEECert(TLSValidationError): + """ + Received more than one TLS EE certificate. + """ + +class ReceivedTLSCACert(TLSValidationError): + """ + Received CA certificate via TLS. + """ + +class WrongEContentType(RPKI_Exception): + """ + Received wrong CMS eContentType. + """ + +class EmptyPEM(RPKI_Exception): + """ + Couldn't find PEM block to convert. + """ + +class UnexpectedCMSCerts(RPKI_Exception): + """ + Received CMS certs when not expecting any. + """ + +class UnexpectedCMSCRLs(RPKI_Exception): + """ + Received CMS CRLs when not expecting any. + """ + +class MissingCMSEEcert(RPKI_Exception): + """ + Didn't receive CMS EE cert when expecting one. + """ + +class MissingCMSCRL(RPKI_Exception): + """ + Didn't receive CMS CRL when expecting one. + """ + +class UnparsableCMSDER(RPKI_Exception): + """ + Alleged CMS DER wasn't parsable. + """ + +class CMSCRLNotSet(RPKI_Exception): + """ + CMS CRL has not been configured. + """ + +class ServerShuttingDown(RPKI_Exception): + """ + Server is shutting down. + """ + +class NoActiveCA(RPKI_Exception): + """ + No active ca_detail for specified class. + """ + +class BadClientURL(RPKI_Exception): + """ + URL given to HTTP client does not match profile. + """ + +class ClientNotFound(RPKI_Exception): + """ + Could not find specified client in database. + """ + +class BadExtension(RPKI_Exception): + """ + Forbidden X.509 extension. + """ + +class ForbiddenURI(RPKI_Exception): + """ + Forbidden URI, does not start with correct base URI. + """ + +class HTTPClientAborted(RPKI_Exception): + """ + HTTP client connection closed while in request-sent state. + """ + +class BadPublicationReply(RPKI_Exception): + """ + Unexpected reply to publication query. + """ + +class DuplicateObject(RPKI_Exception): + """ + Attempt to create an object that already exists. + """ + +class EmptyROAPrefixList(RPKI_Exception): + """ + Can't create ROA with an empty prefix list. + """ + +class NoCoveringCertForROA(RPKI_Exception): + """ + Couldn't find a covering certificate to generate ROA. 
+ """ + +class BSCNotReady(RPKI_Exception): + """ + BSC not yet in a usable state, signing_cert not set. + """ + +class HTTPUnexpectedState(RPKI_Exception): + """ + HTTP event occurred in an unexpected state. + """ + +class HTTPBadVersion(RPKI_Exception): + """ + HTTP couldn't parse HTTP version. + """ + +class HandleTranslationError(RPKI_Exception): + """ + Internal error translating protocol handle -> SQL id. + """ + +class NoObjectAtURI(RPKI_Exception): + """ + No object published at specified URI. + """ + +class CMSContentNotSet(RPKI_Exception): + """ + Inner content of a CMS_object has not been set. If object is known + to be valid, the .extract() method should be able to set the + content; otherwise, only the .verify() method (which checks + signatures) is safe. + """ + +class HTTPTimeout(RPKI_Exception): + """ + HTTP connection timed out. + """ + +class BadIPResource(RPKI_Exception): + """ + Parse failure for alleged IP resource string. + """ + +class BadROAPrefix(RPKI_Exception): + """ + Parse failure for alleged ROA prefix string. + """ + +class CommandParseFailure(RPKI_Exception): + """ + Failed to parse command line. + """ + +class CMSCertHasExpired(RPKI_Exception): + """ + CMS certificate has expired. + """ + +class TrustedCMSCertHasExpired(RPKI_Exception): + """ + Trusted CMS certificate has expired. + """ + +class MultipleCMSEECert(RPKI_Exception): + """ + Can't have more than one CMS EE certificate in validation chain. + """ + +class ResourceOverlap(RPKI_Exception): + """ + Overlapping resources in resource_set. + """ + +class CMSReplay(RPKI_Exception): + """ + Possible CMS replay attack detected. + """ + +class PastNotAfter(RPKI_Exception): + """ + Requested notAfter value is already in the past. + """ + +class NullValidityInterval(RPKI_Exception): + """ + Requested validity interval is null. + """ + +class BadX510DN(RPKI_Exception): + """ + X.510 distinguished name does not match profile. + """ + +class BadAutonomousSystemNumber(RPKI_Exception): + """ + Bad AutonomousSystem number. + """ + +class WrongEKU(RPKI_Exception): + """ + Extended Key Usage extension does not match profile. + """ diff --git a/rpki/gui/__init__.py b/rpki/gui/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/rpki/gui/__init__.py diff --git a/rpki/gui/api/__init__.py b/rpki/gui/api/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/rpki/gui/api/__init__.py diff --git a/rpki/gui/api/urls.py b/rpki/gui/api/urls.py new file mode 100644 index 00000000..8c9d824c --- /dev/null +++ b/rpki/gui/api/urls.py @@ -0,0 +1,22 @@ +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +__version__ = '$Id$' + +from django.conf.urls.defaults import * +from rpki.gui.routeview.api import route_list + +urlpatterns = patterns('', + (r'^v1/route/$', route_list), +) diff --git a/rpki/gui/app/TODO b/rpki/gui/app/TODO new file mode 100644 index 00000000..b7136397 --- /dev/null +++ b/rpki/gui/app/TODO @@ -0,0 +1,60 @@ +Use RequestContext (helper function for render_to_response) and a default +list of context processors for the generic functions + +Teach cert_delete about children, conf*, parent* to say what the ramifications +of deleting a cert are. + +Teach cert form about file upload + +Redirect /accounts/profile/ to /dashboard/ + +Teach dashboard view about looking up resources from parent. +There are 3 types of resources: +- Ones we've accepted and match +- Ones we've accepted but don't match + - two subtypes: + * the parent is now giving us a superset of what they used to. + This is relatively easily handled by keeping the subdivisions + we've made and just making the superset resource the new parent + of the existing resource (e.g., we had accepted 18.5.0.0/16 and + they're now giving us 18.0.0.0/8) + * the parent is now giving us a subset (including none) of what they + used to. Two sub-cases: + - The part that they took away is neither delegated nor roa'd. + - The part that they took away is either delegated or roa'd or both. +- Ones we haven't accepted yet + +The roa needs to learn to handle its prefix children. It may need to +create the covering set of prefixes for an address range. + +Un<something>'d resources are: +what we've gotten from our parent: +models.AddressRange.objects.filter(from_parent=myconf.pk) +minus what we've given to our children or issued roas for +models.AddressRange.objects.filter(child__conf=myconf.pk) +models.AddressRange.objects.filter(roa__conf=myconf.pk) +or +>>> from django.db.models import Q +>>> models.AddressRange.objects.filter( Q(child__conf=myconf.pk) | + Q(roa__conf=myconf.pk) ) + + +and of course the ASN one is easier: +models.Asn.objects.filter(from_parent=myconf.pk) +minus what we've given to our children +models.Asn.objects.filter(child__conf=myconf.pk) + +look in +rpki/resource_set.py + + +Adding a handle / resource-holding entity / "conf": +- upload the <identity> that we've generated and are sending to the parent + +Adding a parent: +- upload the <parent> that he sent me + (keep things open to the parent uploading this directly to the web interface) + +Adding a child: +- upload the <identity> that he sent me + diff --git a/rpki/gui/app/__init__.py b/rpki/gui/app/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/rpki/gui/app/__init__.py diff --git a/rpki/gui/app/admin.py b/rpki/gui/app/admin.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/rpki/gui/app/admin.py diff --git a/rpki/gui/app/check_expired.py b/rpki/gui/app/check_expired.py new file mode 100644 index 00000000..fcf5ecae --- /dev/null +++ b/rpki/gui/app/check_expired.py @@ -0,0 +1,209 @@ +# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+__version__ = '$Id$'
+__all__ = ('notify_expired', 'NetworkError')
+
+import sys
+import socket
+from cStringIO import StringIO
+import logging
+import datetime
+
+from rpki.gui.cacheview.models import Cert
+from rpki.gui.app.models import Conf, ResourceCert, Timestamp, Alert
+from rpki.gui.app.glue import list_received_resources
+from rpki.irdb import Zookeeper
+from rpki.left_right import report_error_elt, list_published_objects_elt
+from rpki.x509 import X509
+
+from django.core.mail import send_mail
+
+logger = logging.getLogger(__name__)
+expire_time = 0  # set by notify_expired()
+now = 0
+
+
+def check_cert(handle, p, errs):
+    """Check the expiration date on the X.509 certificate attached to the
+    given object.
+
+    The object is identified in the warning message by its class name and
+    string representation.
+
+    """
+    t = p.certificate.getNotAfter()
+    if t <= expire_time:
+        e = 'expired' if t <= now else 'will expire'
+        errs.write("%(handle)s's %(type)s %(desc)s %(expire)s on %(date)s\n" % {
+            'handle': handle, 'type': p.__class__.__name__, 'desc': str(p),
+            'expire': e, 'date': t})
+
+
+def check_cert_list(handle, x, errs):
+    for p in x:
+        check_cert(handle, p, errs)
+
+
+def check_expire(conf, errs):
+    # get certs for `handle'
+    cert_set = ResourceCert.objects.filter(conf=conf)
+    for cert in cert_set:
+        # look up cert in cacheview db
+        obj_set = Cert.objects.filter(repo__uri=cert.uri)
+        if not obj_set:
+            # since the <list_received_resources/> output is cached, this can
+            # occur if the cache is out of date as well.
+            errs.write("Unable to locate rescert in rcynic cache: handle=%s uri=%s not_after=%s\n" % (conf.handle, cert.uri, cert.not_after))
+            continue
+        obj = obj_set[0]
+        msg = []
+        expired = False
+        for n, c in enumerate(obj.cert_chain):
+            if c.not_after <= expire_time:
+                expired = True
+                f = '*'
+            else:
+                f = ' '
+            msg.append("%s [%d] uri=%s ski=%s name=%s expires=%s" % (f, n, c.repo.uri, c.keyid, c.name, c.not_after))
+
+            # find ghostbuster records attached to this cert
+            for gbr in c.ghostbusters.all():
+                info = []
+                for s in ('full_name', 'organization', 'email_address', 'telephone'):
+                    t = getattr(gbr, s, None)
+                    if t:
+                        info.append(t)
+
+                msg.append(" Contact: " + ", ".join(info))
+
+        if expired:
+            errs.write("%s's rescert from parent %s will expire soon:\n" % (
+                conf.handle,
+                # parent is None for the root cert
+                cert.parent.handle if cert.parent else 'self'
+            ))
+            errs.write("Certificate chain:\n")
+            errs.write("\n".join(msg))
+            errs.write("\n")
+
+
+def check_child_certs(conf, errs):
+    """Fetch the list of published objects from rpkid, and inspect the issued
+    resource certs (uri ending in .cer).
+
+    """
+    z = Zookeeper(handle=conf.handle)
+    req = list_published_objects_elt.make_pdu(action="list",
+                                              tag="list_published_objects",
+                                              self_handle=conf.handle)
+    pdus = z.call_rpkid(req)
+    for pdu in pdus:
+        if isinstance(pdu, report_error_elt):
+            logger.error("rpkid reported an error: %s" % pdu.error_code)
+        elif isinstance(pdu, list_published_objects_elt):
+            if pdu.uri.endswith('.cer'):
+                cert = X509()
+                cert.set(Base64=pdu.obj)
+                t = cert.getNotAfter()
+                if t <= expire_time:
+                    e = 'expired' if t <= now else 'will expire'
+                    errs.write("%(handle)s's rescert for Child %(child)s %(expire)s on %(date)s uri=%(uri)s subject=%(subject)s\n" % {
+                        'handle': conf.handle,
+                        'child': pdu.child_handle,
+                        'uri': pdu.uri,
+                        'subject': cert.getSubject(),
+                        'expire': e,
+                        'date': t})
+
+
+class NetworkError(Exception):
+    pass
+
+
+def notify_expired(expire_days=14, from_email=None):
+    """Send email notifications about impending expirations of resource
+    and BPKI certificates.
+
+    expire_days: the number of days ahead of today to warn
+
+    from_email: set the From: address for the email
+
+    """
+    global expire_time  # so I don't have to pass it around
+    global now
+
+    now = datetime.datetime.utcnow()
+    expire_time = now + datetime.timedelta(expire_days)
+
+    # this is not exactly right, since we have no way of knowing what the
+    # vhost for the web portal running on this machine is
+    host = socket.getfqdn()
+    if not from_email:
+        from_email = 'root@' + host
+
+    # Ensure that the rcynic and routeviews data have been updated recently.
+    # The QuerySet is created here so that it will be cached and reused on each
+    # iteration of the loop below
+    t = now - datetime.timedelta(hours=12)  # 12 hours
+    stale_timestamps = Timestamp.objects.filter(ts__lte=t)
+
+    # if no arguments are given, query all resource holders
+    qs = Conf.objects.all()
+
+    # check expiration of certs for all handles managed by the web portal
+    for h in qs:
+        # Force cache update since several checks require fresh data
+        try:
+            list_received_resources(sys.stdout, h)
+        except socket.error as e:
+            raise NetworkError('Error while talking to rpkid: %s' % e)
+
+        errs = StringIO()
+
+        # Warn the resource holder admins when data may be out of date
+        if stale_timestamps:
+            errs.write('Warning! Stale data from external sources.\n')
+            errs.write('data source : last import\n')
+            for obj in stale_timestamps:
+                errs.write('%-15s: %s\n' % (obj.name, obj.ts))
+            errs.write('\n')
+
+        check_cert(h.handle, h, errs)
+
+        # HostedCA is the ResourceHolderCA cross certified under ServerCA, so
+        # check the ServerCA expiration date as well
+        check_cert(h.handle, h.hosted_by, errs)
+        check_cert(h.handle, h.hosted_by.issuer, errs)
+
+        check_cert_list(h.handle, h.bscs.all(), errs)
+        check_cert_list(h.handle, h.parents.all(), errs)
+        check_cert_list(h.handle, h.children.all(), errs)
+        check_cert_list(h.handle, h.repositories.all(), errs)
+
+        check_expire(h, errs)
+        check_child_certs(h, errs)
+
+        # if there was output, display it now
+        s = errs.getvalue()
+        if s:
+            logger.info(s)
+
+            t = """This is an automated notice about the upcoming expiration of RPKI resources for the handle %s on %s.
You are receiving this notification because your email address is either registered in a Ghostbuster record, or as the default email address for the account.\n\n""" % (h.handle, host) + h.send_alert( + subject='RPKI expiration notice for %s' % h.handle, + message=t + s, + from_email=from_email, + severity=Alert.WARNING + ) diff --git a/rpki/gui/app/forms.py b/rpki/gui/app/forms.py new file mode 100644 index 00000000..20ce4a07 --- /dev/null +++ b/rpki/gui/app/forms.py @@ -0,0 +1,442 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + + +from django.contrib.auth.models import User +from django import forms +from rpki.resource_set import (resource_range_as, resource_range_ip) +from rpki.gui.app import models +from rpki.exceptions import BadIPResource +from rpki.POW import IPAddress + + +class AddConfForm(forms.Form): + handle = forms.CharField(required=True, + help_text='your handle for your rpki instance') + run_rpkid = forms.BooleanField(required=False, initial=True, + label='Run rpkid?', + help_text='do you want to run your own instance of rpkid?') + rpkid_server_host = forms.CharField(initial='rpkid.example.org', + label='rpkid hostname', + help_text='publicly visible hostname for your rpkid instance') + rpkid_server_port = forms.IntegerField(initial=4404, + label='rpkid port') + run_pubd = forms.BooleanField(required=False, initial=False, + label='Run pubd?', + help_text='do you want to run your own instance of pubd?') + pubd_server_host = forms.CharField(initial='pubd.example.org', + label='pubd hostname', + help_text='publicly visible hostname for your pubd instance') + pubd_server_port = forms.IntegerField(initial=4402, label='pubd port') + pubd_contact_info = forms.CharField(initial='repo-man@rpki.example.org', + label='Pubd contact', + help_text='email address for the operator of your pubd instance') + + +class GhostbusterRequestForm(forms.ModelForm): + """ + Generate a ModelForm with the subset of parents for the current + resource handle. 
+ """ + # override default form field + parent = forms.ModelChoiceField(queryset=None, required=False, + help_text='Specify specific parent, or none for all parents') + + #override + issuer = forms.ModelChoiceField(queryset=None, widget=forms.HiddenInput) + + def __init__(self, *args, **kwargs): + conf = kwargs.pop('conf') + # override initial value for conf in case user tries to alter it + initial = kwargs.setdefault('initial', {}) + initial['issuer'] = conf + super(GhostbusterRequestForm, self).__init__(*args, **kwargs) + self.fields['parent'].queryset = conf.parents.all() + self.fields['issuer'].queryset = models.Conf.objects.filter(pk=conf.pk) + + class Meta: + model = models.GhostbusterRequest + exclude = ('vcard', 'given_name', 'family_name', 'additional_name', + 'honorific_prefix', 'honorific_suffix') + + def clean(self): + email = self.cleaned_data.get('email_address') + postal = self.cleaned_data.get('postal_address') + telephone = self.cleaned_data.get('telephone') + if not any([email, postal, telephone]): + raise forms.ValidationError( + 'One of telephone, email or postal address must be specified') + + return self.cleaned_data + + +class ImportForm(forms.Form): + """Form used for uploading parent/child identity xml files.""" + handle = forms.CharField(required=False, + widget=forms.TextInput(attrs={'class': 'xlarge'}), + help_text='Optional. Your name for this entity, or blank to accept name in XML') + xml = forms.FileField(label='XML file') + + +class ImportRepositoryForm(forms.Form): + handle = forms.CharField(max_length=30, required=False, + label='Parent Handle', + help_text='Optional. Must be specified if you use a different name for this parent') + xml = forms.FileField(label='XML file') + + +class ImportClientForm(forms.Form): + """Form used for importing publication client requests.""" + xml = forms.FileField(label='XML file') + + +class ImportCSVForm(forms.Form): + csv = forms.FileField(label='CSV file') + + +class UserCreateForm(forms.Form): + username = forms.CharField(max_length=30) + email = forms.CharField(max_length=30, + help_text='email address for new user') + password = forms.CharField(widget=forms.PasswordInput) + password2 = forms.CharField(widget=forms.PasswordInput, + label='Confirm Password') + resource_holders = forms.ModelMultipleChoiceField( + queryset=models.Conf.objects.all(), + help_text='allowed to manage these resource holders' + + ) + + def clean_username(self): + username = self.cleaned_data.get('username') + if User.objects.filter(username=username).exists(): + raise forms.ValidationError('user already exists') + return username + + def clean(self): + p1 = self.cleaned_data.get('password') + p2 = self.cleaned_data.get('password2') + if p1 != p2: + raise forms.ValidationError('passwords do not match') + return self.cleaned_data + + +class UserEditForm(forms.Form): + """Form for editing a user.""" + email = forms.CharField() + pw = forms.CharField(widget=forms.PasswordInput, label='Password', + required=False) + pw2 = forms.CharField(widget=forms.PasswordInput, label='Confirm password', + required=False) + resource_holders = forms.ModelMultipleChoiceField( + queryset=models.Conf.objects.all(), + help_text='allowed to manage these resource holders' + ) + + def clean(self): + p1 = self.cleaned_data.get('pw') + p2 = self.cleaned_data.get('pw2') + if p1 != p2: + raise forms.ValidationError('Passwords do not match') + return self.cleaned_data + + +class ROARequest(forms.Form): + """Form for entering a ROA request. 
+ + Handles both IPv4 and IPv6.""" + + prefix = forms.CharField( + widget=forms.TextInput(attrs={ + 'autofocus': 'true', 'placeholder': 'Prefix', + 'class': 'span4' + }) + ) + max_prefixlen = forms.CharField( + required=False, + widget=forms.TextInput(attrs={ + 'placeholder': 'Max len', + 'class': 'span1' + }) + ) + asn = forms.IntegerField( + widget=forms.TextInput(attrs={ + 'placeholder': 'ASN', + 'class': 'span1' + }) + ) + confirmed = forms.BooleanField(widget=forms.HiddenInput, required=False) + + def __init__(self, *args, **kwargs): + """Takes an optional `conf` keyword argument specifying the user that + is creating the ROAs. It is used for validating that the prefix the + user entered is currently allocated to that user. + + """ + conf = kwargs.pop('conf', None) + kwargs['auto_id'] = False + super(ROARequest, self).__init__(*args, **kwargs) + self.conf = conf + self.inline = True + self.use_table = False + + def _as_resource_range(self): + """Convert the prefix in the form to a + rpki.resource_set.resource_range_ip object. + + If there is no mask provided, assume the closest classful mask. + + """ + prefix = self.cleaned_data.get('prefix') + if '/' not in prefix: + p = IPAddress(prefix) + + # determine the first nonzero bit starting from the lsb and + # subtract from the address size to find the closest classful + # mask that contains this single address + prefixlen = 0 + while (p != 0) and (p & 1) == 0: + prefixlen = prefixlen + 1 + p = p >> 1 + mask = p.bits - (8 * (prefixlen / 8)) + prefix = prefix + '/' + str(mask) + + return resource_range_ip.parse_str(prefix) + + def clean_asn(self): + value = self.cleaned_data.get('asn') + if value < 0: + raise forms.ValidationError('AS must be a positive value or 0') + return value + + def clean_prefix(self): + try: + r = self._as_resource_range() + except: + raise forms.ValidationError('invalid prefix') + + manager = models.ResourceRangeAddressV4 if r.version == 4 else models.ResourceRangeAddressV6 + if not manager.objects.filter(cert__conf=self.conf, + prefix_min__lte=r.min, + prefix_max__gte=r.max).exists(): + raise forms.ValidationError('prefix is not allocated to you') + return str(r) + + def clean_max_prefixlen(self): + v = self.cleaned_data.get('max_prefixlen') + if v: + if v[0] == '/': + v = v[1:] # allow user to specify /24 + try: + if int(v) < 0: + raise forms.ValidationError('max prefix length must be positive or 0') + except ValueError: + raise forms.ValidationError('invalid integer value') + return v + + def clean(self): + if 'prefix' in self.cleaned_data: + r = self._as_resource_range() + max_prefixlen = self.cleaned_data.get('max_prefixlen') + max_prefixlen = int(max_prefixlen) if max_prefixlen else r.prefixlen() + if max_prefixlen < r.prefixlen(): + raise forms.ValidationError( + 'max prefix length must be greater than or equal to the prefix length') + if max_prefixlen > r.min.bits: + raise forms.ValidationError, \ + 'max prefix length (%d) is out of range for IP version (%d)' % (max_prefixlen, r.min.bits) + self.cleaned_data['max_prefixlen'] = str(max_prefixlen) + return self.cleaned_data + + +class ROARequestConfirm(forms.Form): + asn = forms.IntegerField(widget=forms.HiddenInput) + prefix = forms.CharField(widget=forms.HiddenInput) + max_prefixlen = forms.IntegerField(widget=forms.HiddenInput) + + def clean_asn(self): + value = self.cleaned_data.get('asn') + if value < 0: + raise forms.ValidationError('AS must be a positive value or 0') + return value + + def clean_prefix(self): + try: + r = 
resource_range_ip.parse_str(self.cleaned_data.get('prefix')) + except BadIPResource: + raise forms.ValidationError('invalid prefix') + return str(r) + + def clean(self): + try: + r = resource_range_ip.parse_str(self.cleaned_data.get('prefix')) + if r.prefixlen() > self.cleaned_data.get('max_prefixlen'): + raise forms.ValidationError('max length is smaller than mask') + except BadIPResource: + pass + return self.cleaned_data + + +class AddASNForm(forms.Form): + """ + Returns a forms.Form subclass which verifies that the entered ASN range + does not overlap with a previous allocation to the specified child, and + that the ASN range is within the range allocated to the parent. + + """ + + asns = forms.CharField( + label='ASNs', + help_text='single ASN or range', + widget=forms.TextInput(attrs={'autofocus': 'true'}) + ) + + def __init__(self, *args, **kwargs): + self.child = kwargs.pop('child') + super(AddASNForm, self).__init__(*args, **kwargs) + + def clean_asns(self): + try: + r = resource_range_as.parse_str(self.cleaned_data.get('asns')) + except: + raise forms.ValidationError('invalid AS or range') + + if not models.ResourceRangeAS.objects.filter( + cert__conf=self.child.issuer, + min__lte=r.min, + max__gte=r.max).exists(): + raise forms.ValidationError('AS or range is not delegated to you') + + # determine if the entered range overlaps with any AS already + # allocated to this child + if self.child.asns.filter(end_as__gte=r.min, start_as__lte=r.max).exists(): + raise forms.ValidationError( + 'Overlap with previous allocation to this child') + + return str(r) + + +class AddNetForm(forms.Form): + """ + Returns a forms.Form subclass which validates that the entered address + range is within the resources allocated to the parent, and does not overlap + with what is already allocated to the specified child. + + """ + address_range = forms.CharField( + help_text='CIDR or range', + widget=forms.TextInput(attrs={'autofocus': 'true'}) + ) + + def __init__(self, *args, **kwargs): + self.child = kwargs.pop('child') + super(AddNetForm, self).__init__(*args, **kwargs) + + def clean_address_range(self): + address_range = self.cleaned_data.get('address_range') + try: + r = resource_range_ip.parse_str(address_range) + if r.version == 6: + qs = models.ResourceRangeAddressV6 + version = 'IPv6' + else: + qs = models.ResourceRangeAddressV4 + version = 'IPv4' + except BadIPResource: + raise forms.ValidationError('invalid IP address range') + + if not qs.objects.filter(cert__conf=self.child.issuer, + prefix_min__lte=r.min, + prefix_max__gte=r.max).exists(): + raise forms.ValidationError( + 'IP address range is not delegated to you') + + # determine if the entered range overlaps with any prefix + # already allocated to this child + for n in self.child.address_ranges.filter(version=version): + rng = n.as_resource_range() + if r.max >= rng.min and r.min <= rng.max: + raise forms.ValidationError( + 'Overlap with previous allocation to this child') + + return str(r) + + +def ChildForm(instance): + """ + Form for editing a Child model. + + This is roughly based on the equivalent ModelForm, but uses Form as a base + class so that selection boxes for the AS and Prefixes can be edited in a + single form. 
+
+    """
+
+    class _wrapped(forms.Form):
+        valid_until = forms.DateTimeField(initial=instance.valid_until)
+        as_ranges = forms.ModelMultipleChoiceField(queryset=models.ChildASN.objects.filter(child=instance),
+                                                   required=False,
+                                                   label='AS Ranges',
+                                                   help_text='deselect to remove delegation')
+        address_ranges = forms.ModelMultipleChoiceField(queryset=models.ChildNet.objects.filter(child=instance),
+                                                        required=False,
+                                                        help_text='deselect to remove delegation')
+
+    return _wrapped
+
+
+class Empty(forms.Form):
+    """Stub form for views requiring confirmation."""
+    pass
+
+
+class ResourceHolderForm(forms.Form):
+    """Form for editing the ACL on Conf objects."""
+    users = forms.ModelMultipleChoiceField(
+        queryset=User.objects.all(),
+        help_text='users allowed to manage this resource holder'
+    )
+
+
+class ResourceHolderCreateForm(forms.Form):
+    """Form for creating new resource holders."""
+    handle = forms.CharField(max_length=30)
+    parent = forms.ModelChoiceField(
+        required=False,
+        queryset=models.Conf.objects.all(),
+        help_text='optionally make the new resource holder a child of this resource holder'
+    )
+    users = forms.ModelMultipleChoiceField(
+        required=False,
+        queryset=User.objects.all(),
+        help_text='users allowed to manage this resource holder'
+    )
+
+    def clean_handle(self):
+        handle = self.cleaned_data.get('handle')
+        if models.Conf.objects.filter(handle=handle).exists():
+            raise forms.ValidationError(
+                'a resource holder with that handle already exists'
+            )
+        return handle
+
+    def clean(self):
+        handle = self.cleaned_data.get('handle')
+        parent = self.cleaned_data.get('parent')
+        if handle and parent and parent.children.filter(handle=handle).exists():
+            raise forms.ValidationError('parent already has a child by that name')
+        return self.cleaned_data
diff --git a/rpki/gui/app/glue.py b/rpki/gui/app/glue.py
new file mode 100644
index 00000000..a9f6441e
--- /dev/null
+++ b/rpki/gui/app/glue.py
@@ -0,0 +1,132 @@
+# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions
+# Copyright (C) 2012 SPARTA, Inc. a Parsons Company
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+This file contains code that interfaces between the Django views implementing
+the portal gui and the rpki.* modules.
+ +""" + +from __future__ import with_statement + +__version__ = '$Id$' + +from datetime import datetime + +from rpki.resource_set import (resource_set_as, resource_set_ipv4, + resource_set_ipv6, resource_range_ipv4, + resource_range_ipv6) +from rpki.left_right import list_received_resources_elt, report_error_elt +from rpki.irdb.zookeeper import Zookeeper +from rpki.gui.app import models +from rpki.exceptions import BadIPResource + +from django.contrib.auth.models import User +from django.db.transaction import commit_on_success + + +def ghostbuster_to_vcard(gbr): + """Convert a GhostbusterRequest object into a vCard object.""" + import vobject + + vcard = vobject.vCard() + vcard.add('N').value = vobject.vcard.Name(family=gbr.family_name, + given=gbr.given_name) + + adr_fields = ['box', 'extended', 'street', 'city', 'region', 'code', + 'country'] + adr_dict = dict((f, getattr(gbr, f, '')) for f in adr_fields) + if any(adr_dict.itervalues()): + vcard.add('ADR').value = vobject.vcard.Address(**adr_dict) + + # mapping from vCard type to Ghostbuster model field + # the ORG type is a sequence of organization unit names, so + # transform the org name into a tuple before stuffing into the + # vCard object + attrs = [('FN', 'full_name', None), + ('TEL', 'telephone', None), + ('ORG', 'organization', lambda x: (x,)), + ('EMAIL', 'email_address', None)] + for vtype, field, transform in attrs: + v = getattr(gbr, field) + if v: + vcard.add(vtype).value = transform(v) if transform else v + return vcard.serialize() + + +class LeftRightError(Exception): + """Class for wrapping report_error_elt errors from Zookeeper.call_rpkid(). + + It expects a single argument, which is the associated report_error_elt instance.""" + + def __str__(self): + return 'Error occurred while communicating with rpkid: handle=%s code=%s text=%s' % ( + self.args[0].self_handle, + self.args[0].error_code, + self.args[0].error_text) + + +@commit_on_success +def list_received_resources(log, conf): + """ + Query rpkid for this resource handle's received resources. + + The semantics are to clear the entire table and populate with the list of + certs received. Other models should not reference the table directly with + foreign keys. 
+ + """ + + z = Zookeeper(handle=conf.handle) + pdus = z.call_rpkid(list_received_resources_elt.make_pdu(self_handle=conf.handle)) + # pdus is sometimes None (see https://trac.rpki.net/ticket/681) + if pdus is None: + print >>log, 'error: call_rpkid() returned None for handle %s when fetching received resources' % conf.handle + return + + models.ResourceCert.objects.filter(conf=conf).delete() + + for pdu in pdus: + if isinstance(pdu, report_error_elt): + # this will cause the db to be rolled back so the above delete() + # won't clobber existing resources + raise LeftRightError, pdu + elif isinstance(pdu, list_received_resources_elt): + if pdu.parent_handle != conf.handle: + parent = models.Parent.objects.get(issuer=conf, + handle=pdu.parent_handle) + else: + # root cert, self-signed + parent = None + + not_before = datetime.strptime(pdu.notBefore, "%Y-%m-%dT%H:%M:%SZ") + not_after = datetime.strptime(pdu.notAfter, "%Y-%m-%dT%H:%M:%SZ") + + cert = models.ResourceCert.objects.create( + conf=conf, parent=parent, not_before=not_before, + not_after=not_after, uri=pdu.uri) + + for asn in resource_set_as(pdu.asn): + cert.asn_ranges.create(min=asn.min, max=asn.max) + + for rng in resource_set_ipv4(pdu.ipv4): + cert.address_ranges.create(prefix_min=rng.min, + prefix_max=rng.max) + + for rng in resource_set_ipv6(pdu.ipv6): + cert.address_ranges_v6.create(prefix_min=rng.min, + prefix_max=rng.max) + else: + print >>log, "error: unexpected pdu from rpkid type=%s" % type(pdu) diff --git a/rpki/gui/app/migrations/0001_initial.py b/rpki/gui/app/migrations/0001_initial.py new file mode 100644 index 00000000..80877901 --- /dev/null +++ b/rpki/gui/app/migrations/0001_initial.py @@ -0,0 +1,192 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'ResourceCert' + db.create_table('app_resourcecert', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('parent', self.gf('django.db.models.fields.related.ForeignKey')(related_name='certs', to=orm['irdb.Parent'])), + ('not_before', self.gf('django.db.models.fields.DateTimeField')()), + ('not_after', self.gf('django.db.models.fields.DateTimeField')()), + ('uri', self.gf('django.db.models.fields.CharField')(max_length=255)), + )) + db.send_create_signal('app', ['ResourceCert']) + + # Adding model 'ResourceRangeAddressV4' + db.create_table('app_resourcerangeaddressv4', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('prefix_min', self.gf('rpki.gui.models.IPv4AddressField')(db_index=True)), + ('prefix_max', self.gf('rpki.gui.models.IPv4AddressField')(db_index=True)), + ('cert', self.gf('django.db.models.fields.related.ForeignKey')(related_name='address_ranges', to=orm['app.ResourceCert'])), + )) + db.send_create_signal('app', ['ResourceRangeAddressV4']) + + # Adding model 'ResourceRangeAddressV6' + db.create_table('app_resourcerangeaddressv6', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('prefix_min', self.gf('rpki.gui.models.IPv6AddressField')(db_index=True)), + ('prefix_max', self.gf('rpki.gui.models.IPv6AddressField')(db_index=True)), + ('cert', self.gf('django.db.models.fields.related.ForeignKey')(related_name='address_ranges_v6', to=orm['app.ResourceCert'])), + )) + db.send_create_signal('app', ['ResourceRangeAddressV6']) + + # Adding model 'ResourceRangeAS' + db.create_table('app_resourcerangeas', ( + ('id', 
self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('min', self.gf('django.db.models.fields.PositiveIntegerField')()), + ('max', self.gf('django.db.models.fields.PositiveIntegerField')()), + ('cert', self.gf('django.db.models.fields.related.ForeignKey')(related_name='asn_ranges', to=orm['app.ResourceCert'])), + )) + db.send_create_signal('app', ['ResourceRangeAS']) + + # Adding model 'GhostbusterRequest' + db.create_table('app_ghostbusterrequest', ( + ('ghostbusterrequest_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['irdb.GhostbusterRequest'], unique=True, primary_key=True)), + ('full_name', self.gf('django.db.models.fields.CharField')(max_length=40)), + ('family_name', self.gf('django.db.models.fields.CharField')(max_length=20)), + ('given_name', self.gf('django.db.models.fields.CharField')(max_length=20)), + ('additional_name', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)), + ('honorific_prefix', self.gf('django.db.models.fields.CharField')(max_length=10, null=True, blank=True)), + ('honorific_suffix', self.gf('django.db.models.fields.CharField')(max_length=10, null=True, blank=True)), + ('email_address', self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True)), + ('organization', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), + ('telephone', self.gf('rpki.gui.app.models.TelephoneField')(max_length=40, null=True, blank=True)), + ('box', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)), + ('extended', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), + ('street', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), + ('city', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)), + ('region', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)), + ('code', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)), + ('country', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)), + )) + db.send_create_signal('app', ['GhostbusterRequest']) + + # Adding model 'Timestamp' + db.create_table('app_timestamp', ( + ('name', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)), + ('ts', self.gf('django.db.models.fields.DateTimeField')()), + )) + db.send_create_signal('app', ['Timestamp']) + + + def backwards(self, orm): + # Deleting model 'ResourceCert' + db.delete_table('app_resourcecert') + + # Deleting model 'ResourceRangeAddressV4' + db.delete_table('app_resourcerangeaddressv4') + + # Deleting model 'ResourceRangeAddressV6' + db.delete_table('app_resourcerangeaddressv6') + + # Deleting model 'ResourceRangeAS' + db.delete_table('app_resourcerangeas') + + # Deleting model 'GhostbusterRequest' + db.delete_table('app_ghostbusterrequest') + + # Deleting model 'Timestamp' + db.delete_table('app_timestamp') + + + models = { + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 
'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': 
('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app']
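The related_name values frozen in this initial migration ('asn_ranges', 'address_ranges', 'address_ranges_v6') are the reverse relations that rpki.gui.app.glue.list_received_resources() later uses to repopulate these tables. An illustrative sketch of that relationship (assumptions: the rpki.gui.app.models classes this migration creates, plus the conf field added by the later migrations in this series):

    from rpki.gui.app import models

    def record_received_cert(conf, parent, uri, not_before, not_after, asn_min, asn_max):
        # One app_resourcecert row per certificate received from a parent ...
        cert = models.ResourceCert.objects.create(
            conf=conf, parent=parent, uri=uri,
            not_before=not_before, not_after=not_after)
        # ... and one row per delegated ASN range, attached through the
        # 'asn_ranges' reverse relation named above.
        cert.asn_ranges.create(min=asn_min, max=asn_max)
        return cert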
\ No newline at end of file diff --git a/rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py b/rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py new file mode 100644 index 00000000..d3326f90 --- /dev/null +++ b/rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding field 'ResourceCert.conf' + db.add_column('app_resourcecert', 'conf', + self.gf('django.db.models.fields.related.ForeignKey')(related_name='certs', null=True, to=orm['irdb.ResourceHolderCA']), + keep_default=False) + + + def backwards(self, orm): + # Deleting field 'ResourceCert.conf' + db.delete_column('app_resourcecert', 'conf_id') + + + models = { + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': 
'255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 
'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app']
\ No newline at end of file diff --git a/rpki/gui/app/migrations/0003_set_conf_from_parent.py b/rpki/gui/app/migrations/0003_set_conf_from_parent.py new file mode 100644 index 00000000..a90a11cc --- /dev/null +++ b/rpki/gui/app/migrations/0003_set_conf_from_parent.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import DataMigration +from django.db import models + +class Migration(DataMigration): + + def forwards(self, orm): + "Write your forwards methods here." + # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." + for cert in orm.ResourceCert.objects.all(): + cert.conf = cert.parent.issuer + cert.save() + + def backwards(self, orm): + "Write your backwards methods here." + pass + + models = { + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 
'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 
'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app'] + symmetrical = True diff --git a/rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py b/rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py new file mode 100644 index 00000000..a236ad4a --- /dev/null +++ b/rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + + # Changing field 'ResourceCert.conf' + db.alter_column('app_resourcecert', 'conf_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['irdb.ResourceHolderCA'])) + + def backwards(self, orm): + + # Changing field 'ResourceCert.conf' + db.alter_column('app_resourcecert', 'conf_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['irdb.ResourceHolderCA'])) + + models = { + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', 
[], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 
'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app'] diff --git a/rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py b/rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py new file mode 100644 index 00000000..11e9c814 --- /dev/null +++ b/rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + + # Changing field 'ResourceCert.parent' + db.alter_column('app_resourcecert', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['irdb.Parent'])) + + def backwards(self, orm): + + # Changing field 'ResourceCert.parent' + db.alter_column('app_resourcecert', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['irdb.Parent'])) + + models = { + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': 
'20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': 
"'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app']
\ No newline at end of file diff --git a/rpki/gui/app/migrations/0006_add_conf_acl.py b/rpki/gui/app/migrations/0006_add_conf_acl.py new file mode 100644 index 00000000..88fe8171 --- /dev/null +++ b/rpki/gui/app/migrations/0006_add_conf_acl.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'ConfACL' + db.create_table('app_confacl', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('conf', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['irdb.ResourceHolderCA'])), + ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])), + )) + db.send_create_signal('app', ['ConfACL']) + + # Adding unique constraint on 'ConfACL', fields ['user', 'conf'] + db.create_unique('app_confacl', ['user_id', 'conf_id']) + + + def backwards(self, orm): + # Removing unique constraint on 'ConfACL', fields ['user', 'conf'] + db.delete_unique('app_confacl', ['user_id', 'conf_id']) + + # Deleting model 'ConfACL' + db.delete_table('app_confacl') + + + models = { + 'app.confacl': { + 'Meta': {'unique_together': "(('user', 'conf'),)", 'object_name': 'ConfACL'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) + }, + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': 
('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], 
{'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app']
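
The app_confacl table created by this migration backs a simple access control list mapping Django user accounts to the resource-holder handles they may manage. As rough orientation only (the authoritative definition appears in rpki/gui/app/models.py, added later in this changeset), the model behind the table looks roughly like the sketch below; the string reference to irdb.ResourceHolderCA is an assumption for the sketch, the real code points at the Conf proxy model:

# Hedged sketch of the model stored in app_confacl; not part of the changeset itself.
from django.contrib.auth.models import User
from django.db import models

class ConfACL(models.Model):
    # which user is allowed to manage which resource-holder handle
    conf = models.ForeignKey('irdb.ResourceHolderCA')
    user = models.ForeignKey(User)

    class Meta:
        # mirrors the unique constraint added by db.create_unique() above
        unique_together = (('user', 'conf'),)

Migration 0007 below then seeds this table, creating one ConfACL row for every ResourceHolderCA whose handle matches an existing Django username.
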
\ No newline at end of file diff --git a/rpki/gui/app/migrations/0007_default_acls.py b/rpki/gui/app/migrations/0007_default_acls.py new file mode 100644 index 00000000..40656d0f --- /dev/null +++ b/rpki/gui/app/migrations/0007_default_acls.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import DataMigration +from django.db import models +from django.core.exceptions import ObjectDoesNotExist + +class Migration(DataMigration): + + def forwards(self, orm): + "Write your forwards methods here." + # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." + for conf in orm['irdb.ResourceHolderCA'].objects.all(): + try: + user = orm['auth.User'].objects.get(username=conf.handle) + orm['app.ConfACL'].objects.create( + conf=conf, + user=user + ) + except ObjectDoesNotExist: + pass + + def backwards(self, orm): + "Write your backwards methods here." + orm['app.ConfACL'].objects.all().delete() + + models = { + 'app.confacl': { + 'Meta': {'unique_together': "(('user', 'conf'),)", 'object_name': 'ConfACL'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) + }, + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], 
{'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': 
('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app'] + symmetrical = True diff --git a/rpki/gui/app/migrations/0008_add_alerts.py b/rpki/gui/app/migrations/0008_add_alerts.py new file mode 100644 index 00000000..77af68d2 --- /dev/null +++ b/rpki/gui/app/migrations/0008_add_alerts.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'Alert' + db.create_table('app_alert', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('conf', self.gf('django.db.models.fields.related.ForeignKey')(related_name='alerts', to=orm['irdb.ResourceHolderCA'])), + ('severity', self.gf('django.db.models.fields.SmallIntegerField')(default=0)), + ('when', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), + ('seen', self.gf('django.db.models.fields.BooleanField')(default=False)), + ('subject', self.gf('django.db.models.fields.CharField')(max_length=66)), + ('text', self.gf('django.db.models.fields.TextField')()), + )) + db.send_create_signal('app', ['Alert']) + + + def backwards(self, orm): + # Deleting model 'Alert' + db.delete_table('app_alert') + + + models = { + 'app.alert': { + 'Meta': {'object_name': 'Alert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'alerts'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'seen': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'severity': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'subject': ('django.db.models.fields.CharField', [], {'max_length': '66'}), + 'text': ('django.db.models.fields.TextField', [], {}), + 'when': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}) + }, + 'app.confacl': { + 'Meta': {'unique_together': "(('user', 'conf'),)", 'object_name': 'ConfACL'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) + }, + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 
'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 
'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 
'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app']
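
The app_alert table added by this migration is written by Conf.send_alert() in rpki/gui/app/models.py (also part of this changeset), which both stores an Alert row and mails the resource holder's contact addresses. A hypothetical usage sketch follows; the handle and e-mail address are placeholders, not values from the changeset:

# Hypothetical usage of the alert machinery added by this migration.
from rpki.gui.app.models import Alert, Conf

conf = Conf.objects.get(handle='example-handle')        # placeholder handle
conf.send_alert(subject='certificate expiring',
                message='a resource certificate expires within 14 days',
                from_email='rpki-gui@example.org',       # placeholder address
                severity=Alert.WARNING)

# Unseen alerts for this handle, newest first.
unread = conf.alerts.filter(seen=False).order_by('-when')
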
\ No newline at end of file diff --git a/rpki/gui/app/migrations/__init__.py b/rpki/gui/app/migrations/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/rpki/gui/app/migrations/__init__.py diff --git a/rpki/gui/app/models.py b/rpki/gui/app/models.py new file mode 100644 index 00000000..7d643fdc --- /dev/null +++ b/rpki/gui/app/models.py @@ -0,0 +1,420 @@ +# Copyright (C) 2010 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django.db import models +from django.contrib.auth.models import User +from django.core.mail import send_mail + +import rpki.resource_set +import rpki.exceptions +import rpki.irdb.models +import rpki.gui.models +import rpki.gui.routeview.models +from south.modelsinspector import add_introspection_rules + + +class TelephoneField(models.CharField): + def __init__(self, **kwargs): + if 'max_length' not in kwargs: + kwargs['max_length'] = 40 + models.CharField.__init__(self, **kwargs) + +add_introspection_rules([], ['^rpki\.gui\.app\.models\.TelephoneField']) + + +class Parent(rpki.irdb.models.Parent): + """proxy model for irdb Parent""" + + def __unicode__(self): + return u"%s's parent %s" % (self.issuer.handle, self.handle) + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.parent_detail', [str(self.pk)]) + + class Meta: + proxy = True + + +class Child(rpki.irdb.models.Child): + """proxy model for irdb Child""" + + def __unicode__(self): + return u"%s's child %s" % (self.issuer.handle, self.handle) + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.child_detail', [str(self.pk)]) + + class Meta: + proxy = True + verbose_name_plural = 'children' + + +class ChildASN(rpki.irdb.models.ChildASN): + """Proxy model for irdb ChildASN.""" + + class Meta: + proxy = True + + def __unicode__(self): + return u'AS%s' % self.as_resource_range() + + +class ChildNet(rpki.irdb.models.ChildNet): + """Proxy model for irdb ChildNet.""" + + class Meta: + proxy = True + + def __unicode__(self): + return u'%s' % self.as_resource_range() + + +class Alert(models.Model): + """Stores alert messages intended to be consumed by the user.""" + + INFO = 0 + WARNING = 1 + ERROR = 2 + + SEVERITY_CHOICES = ( + (INFO, 'info'), + (WARNING, 'warning'), + (ERROR, 'error'), + ) + + conf = models.ForeignKey('Conf', related_name='alerts') + severity = models.SmallIntegerField(choices=SEVERITY_CHOICES, default=INFO) + when = models.DateTimeField(auto_now_add=True) + seen = models.BooleanField(default=False) + subject = models.CharField(max_length=66) + text = models.TextField() + + @models.permalink + def get_absolute_url(self): + return ('alert-detail', [str(self.pk)]) + + +class Conf(rpki.irdb.models.ResourceHolderCA): + """This is the center 
of the universe, also known as a place to + have a handle on a resource-holding entity. It's the <self> + in the rpkid schema. + + """ + @property + def parents(self): + """Simulates irdb.models.Parent.objects, but returns app.models.Parent + proxy objects. + + """ + return Parent.objects.filter(issuer=self) + + @property + def children(self): + """Simulates irdb.models.Child.objects, but returns app.models.Child + proxy objects. + + """ + return Child.objects.filter(issuer=self) + + @property + def ghostbusters(self): + return GhostbusterRequest.objects.filter(issuer=self) + + @property + def repositories(self): + return Repository.objects.filter(issuer=self) + + @property + def roas(self): + return ROARequest.objects.filter(issuer=self) + + @property + def routes(self): + """Return all IPv4 routes covered by RPKI certs issued to this resource + holder. + + """ + # build a Q filter to select all RouteOrigin objects covered by + # prefixes in the resource holder's certificates + q = models.Q() + for p in ResourceRangeAddressV4.objects.filter(cert__conf=self): + q |= models.Q(prefix_min__gte=p.prefix_min, + prefix_max__lte=p.prefix_max) + return RouteOrigin.objects.filter(q) + + @property + def routes_v6(self): + """Return all IPv6 routes covered by RPKI certs issued to this resource + holder. + + """ + # build a Q filter to select all RouteOrigin objects covered by + # prefixes in the resource holder's certificates + q = models.Q() + for p in ResourceRangeAddressV6.objects.filter(cert__conf=self): + q |= models.Q(prefix_min__gte=p.prefix_min, + prefix_max__lte=p.prefix_max) + return RouteOriginV6.objects.filter(q) + + def send_alert(self, subject, message, from_email, severity=Alert.INFO): + """Store an alert for this resource holder.""" + self.alerts.create(subject=subject, text=message, severity=severity) + + send_mail( + subject=subject, + message=message, + from_email=from_email, + recipient_list=self.email_list + ) + + @property + def email_list(self): + """Return a list of the contact emails for this resource holder. + + Contact emails are extract from any ghostbuster requests, and any + linked user accounts. + + """ + notify_emails = [gbr.email_address for gbr in self.ghostbusters if gbr.email_address] + notify_emails.extend( + [acl.user.email for acl in ConfACL.objects.filter(conf=self) if acl.user.email] + ) + return notify_emails + + def clear_alerts(self): + self.alerts.all().delete() + + + class Meta: + proxy = True + + +class ResourceCert(models.Model): + """Represents a resource certificate. + + This model is used to cache the output of <list_received_resources/>. + + """ + + # Handle to which this cert was issued + conf = models.ForeignKey(Conf, related_name='certs') + + # The parent that issued the cert. This field is marked null=True because + # the root has no parent + parent = models.ForeignKey(Parent, related_name='certs', null=True) + + # certificate validity period + not_before = models.DateTimeField() + not_after = models.DateTimeField() + + # Locator for this object. 
Used to look up the validation status, expiry + # of ancestor certs in cacheview + uri = models.CharField(max_length=255) + + def __unicode__(self): + if self.parent: + return u"%s's cert from %s" % (self.conf.handle, + self.parent.handle) + else: + return u"%s's root cert" % self.conf.handle + + def get_cert_chain(self): + """Return a list containing the complete certificate chain for this + certificate.""" + cert = self + x = [cert] + while cert.issuer: + cert = cert.issuer + x.append(cert) + x.reverse() + return x + cert_chain = property(get_cert_chain) + + +class ResourceRangeAddressV4(rpki.gui.models.PrefixV4): + cert = models.ForeignKey(ResourceCert, related_name='address_ranges') + + +class ResourceRangeAddressV6(rpki.gui.models.PrefixV6): + cert = models.ForeignKey(ResourceCert, related_name='address_ranges_v6') + + +class ResourceRangeAS(rpki.gui.models.ASN): + cert = models.ForeignKey(ResourceCert, related_name='asn_ranges') + + +class ROARequest(rpki.irdb.models.ROARequest): + class Meta: + proxy = True + + def __unicode__(self): + return u"%s's ROA request for AS%d" % (self.issuer.handle, self.asn) + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.roa_detail', [str(self.pk)]) + + @property + def routes(self): + "Return all IPv4 routes covered by this roa prefix." + # this assumes one prefix per ROA + rng = self.prefixes.filter(version=4)[0].as_resource_range() + return rpki.gui.routeview.models.RouteOrigin.objects.filter( + prefix_min__gte=rng.min, + prefix_max__lte=rng.max + ) + + @property + def routes_v6(self): + "Return all IPv6 routes covered by this roa prefix." + # this assumes one prefix per ROA + rng = self.prefixes.filter(version=6)[0].as_resource_range() + return rpki.gui.routeview.models.RouteOriginV6.objects.filter( + prefix_min__gte=rng.min, + prefix_max__lte=rng.max + ) + + +class ROARequestPrefix(rpki.irdb.models.ROARequestPrefix): + class Meta: + proxy = True + + def __unicode__(self): + return u'ROA Request Prefix %s' % str(self.as_roa_prefix()) + + +class GhostbusterRequest(rpki.irdb.models.GhostbusterRequest): + """ + Stores the information require to fill out a vCard entry to + populate a ghostbusters record. + + This model is inherited from the irdb GhostBusterRequest model so + that the broken out fields can be included for ease of editing. + """ + + full_name = models.CharField(max_length=40) + + # components of the vCard N type + family_name = models.CharField(max_length=20) + given_name = models.CharField(max_length=20) + additional_name = models.CharField(max_length=20, blank=True, null=True) + honorific_prefix = models.CharField(max_length=10, blank=True, null=True) + honorific_suffix = models.CharField(max_length=10, blank=True, null=True) + + email_address = models.EmailField(blank=True, null=True) + organization = models.CharField(blank=True, null=True, max_length=255) + telephone = TelephoneField(blank=True, null=True) + + # elements of the ADR type + box = models.CharField(verbose_name='P.O. 
Box', blank=True, null=True, + max_length=40) + extended = models.CharField(blank=True, null=True, max_length=255) + street = models.CharField(blank=True, null=True, max_length=255) + city = models.CharField(blank=True, null=True, max_length=40) + region = models.CharField(blank=True, null=True, max_length=40, + help_text='state or province') + code = models.CharField(verbose_name='Postal Code', blank=True, null=True, + max_length=40) + country = models.CharField(blank=True, null=True, max_length=40) + + def __unicode__(self): + return u"%s's GBR: %s" % (self.issuer.handle, self.full_name) + + @models.permalink + def get_absolute_url(self): + return ('gbr-detail', [str(self.pk)]) + + class Meta: + ordering = ('family_name', 'given_name') + + +class Timestamp(models.Model): + """Model to hold metadata about the collection of external data. + + This model is a hash table mapping a timestamp name to the + timestamp value. All timestamp values are in UTC. + + The utility function rpki.gui.app.timestamp.update(name) should be used to + set timestamps rather than updating this model directly.""" + + name = models.CharField(max_length=30, primary_key=True) + ts = models.DateTimeField(null=False) + + def __unicode__(self): + return '%s: %s' % (self.name, self.ts) + + +class Repository(rpki.irdb.models.Repository): + class Meta: + proxy = True + verbose_name = 'Repository' + verbose_name_plural = 'Repositories' + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.repository_detail', [str(self.pk)]) + + def __unicode__(self): + return "%s's repository %s" % (self.issuer.handle, self.handle) + + +class Client(rpki.irdb.models.Client): + "Proxy model for pubd clients." + + class Meta: + proxy = True + verbose_name = 'Client' + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.client_detail', [str(self.pk)]) + + def __unicode__(self): + return self.handle + + +class RouteOrigin(rpki.gui.routeview.models.RouteOrigin): + class Meta: + proxy = True + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.route_detail', [str(self.pk)]) + + +class RouteOriginV6(rpki.gui.routeview.models.RouteOriginV6): + class Meta: + proxy = True + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.route_detail', [str(self.pk)]) + + +class ConfACL(models.Model): + """Stores access control for which users are allowed to manage a given + resource handle. + + """ + + conf = models.ForeignKey(Conf) + user = models.ForeignKey(User) + + class Meta: + unique_together = (('user', 'conf')) diff --git a/rpki/gui/app/range_list.py b/rpki/gui/app/range_list.py new file mode 100755 index 00000000..21fd1f29 --- /dev/null +++ b/rpki/gui/app/range_list.py @@ -0,0 +1,252 @@ +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS.
IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +import bisect +import unittest + + +class RangeList(list): + """A sorted list of ranges, which automatically merges adjacent ranges. + + Items in the list are expected to have ".min" and ".max" attributes.""" + + def __init__(self, ini=None): + list.__init__(self) + if ini: + self.extend(ini) + + def append(self, v): + keys = [x.min for x in self] + + # lower bound + i = bisect.bisect_left(keys, v.min) + + # upper bound + j = bisect.bisect_right(keys, v.max, lo=i) + + # if the max value for the previous item is greater than v.min, include + # the previous item in the range to replace and use its min value. + # also include the previous item if the max value is 1 less than the + # min value for the inserted item + if i > 0 and self[i - 1].max >= v.min - 1: + i = i - 1 + vmin = self[i].min + else: + vmin = v.min + + # if the max value for the previous item is greater than the max value + # for the new item, use the previous item's max + if j > 0 and self[j - 1].max > v.max: + vmax = self[j - 1].max + else: + vmax = v.max + + # if the max value for the new item is 1 less than the min value for + # the next item, combine into a single item + if j < len(self) and vmax + 1 == self[j].min: + vmax = self[j].max + j = j + 1 + + # replace the range with a new object covering the entire range + self[i:j] = [v.__class__(vmin, vmax)] + + def extend(self, args): + for x in args: + self.append(x) + + def difference(self, other): + """Return a RangeList object which contains ranges in this object which + are not in "other".""" + it = iter(other) + + try: + cur = it.next() + except StopIteration: + return self + + r = RangeList() + + for x in self: + xmin = x.min + + def V(v): + """convert the integer value to the appropriate type for this + range""" + return x.__class__.datum_type(v) + + try: + while xmin <= x.max: + if xmin < cur.min: + r.append(x.__class__(V(xmin), + V(min(x.max, cur.min - 1)))) + xmin = cur.max + 1 + elif xmin == cur.min: + xmin = cur.max + 1 + else: # xmin > cur.min + if xmin <= cur.max: + xmin = cur.max + 1 + else: # xmin > cur.max + cur = it.next() + + except StopIteration: + r.append(x.__class__(V(xmin), x.max)) + + return r + + +class TestRangeList(unittest.TestCase): + class MinMax(object): + datum_type = int + + def __init__(self, range_min, range_max): + self.min = range_min + self.max = range_max + + def __str__(self): + return '(%d, %d)' % (self.min, self.max) + + def __repr__(self): + return '<MinMax: (%d, %d)>' % (self.min, self.max) + + def __eq__(self, other): + return self.min == other.min and self.max == other.max + + def setUp(self): + self.v1 = TestRangeList.MinMax(1, 2) + self.v2 = TestRangeList.MinMax(4, 5) + self.v3 = TestRangeList.MinMax(7, 8) + self.v4 = TestRangeList.MinMax(3, 4) + self.v5 = TestRangeList.MinMax(2, 3) + self.v6 = TestRangeList.MinMax(1, 10) + + def test_empty_append(self): + s = RangeList() + s.append(self.v1) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], self.v1) + + def test_no_overlap(self): + s = RangeList() + s.append(self.v1) + s.append(self.v2) + self.assertTrue(len(s) == 2) + self.assertEqual(s[0], self.v1) + self.assertEqual(s[1], self.v2) + + def test_no_overlap_prepend(self): + 
s = RangeList() + s.append(self.v2) + s.append(self.v1) + self.assertTrue(len(s) == 2) + self.assertEqual(s[0], self.v1) + self.assertEqual(s[1], self.v2) + + def test_insert_middle(self): + s = RangeList() + s.append(self.v1) + s.append(self.v3) + s.append(self.v2) + self.assertTrue(len(s) == 3) + self.assertEqual(s[0], self.v1) + self.assertEqual(s[1], self.v2) + self.assertEqual(s[2], self.v3) + + def test_append_overlap(self): + s = RangeList() + s.append(self.v1) + s.append(self.v5) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], TestRangeList.MinMax(1, 3)) + + def test_combine_range(self): + s = RangeList() + s.append(self.v1) + s.append(self.v4) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], TestRangeList.MinMax(1, 4)) + + def test_append_subset(self): + s = RangeList() + s.append(self.v6) + s.append(self.v3) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], self.v6) + + def test_append_equal(self): + s = RangeList() + s.append(self.v6) + s.append(self.v6) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], self.v6) + + def test_prepend_combine(self): + s = RangeList() + s.append(self.v4) + s.append(self.v1) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], TestRangeList.MinMax(1, 4)) + + def test_append_aggregate(self): + s = RangeList() + s.append(self.v1) + s.append(self.v2) + s.append(self.v3) + s.append(self.v6) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], self.v6) + + def test_diff_empty(self): + s = RangeList() + s.append(self.v1) + self.assertEqual(s, s.difference([])) + + def test_diff_self(self): + s = RangeList() + s.append(self.v1) + self.assertEqual(s.difference(s), []) + + def test_diff_middle(self): + s1 = RangeList([self.v6]) + s2 = RangeList([self.v3]) + self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1, 6), TestRangeList.MinMax(9, 10)])) + + def test_diff_overlap(self): + s1 = RangeList([self.v2]) + s2 = RangeList([self.v4]) + self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(5, 5)])) + + def test_diff_overlap2(self): + s1 = RangeList([self.v2]) + s2 = RangeList([self.v4]) + self.assertEqual(s2.difference(s1), RangeList([TestRangeList.MinMax(3, 3)])) + + def test_diff_multi(self): + s1 = RangeList([TestRangeList.MinMax(1, 2), TestRangeList.MinMax(4, 5)]) + s2 = RangeList([TestRangeList.MinMax(4, 4)]) + self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1, 2), TestRangeList.MinMax(5, 5)])) + + def test_diff_multi_overlap(self): + s1 = RangeList([TestRangeList.MinMax(1, 2), TestRangeList.MinMax(3, 4)]) + s2 = RangeList([TestRangeList.MinMax(2, 3)]) + self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1,1), TestRangeList.MinMax(4,4)])) + + def test_diff_multi_overlap2(self): + s1 = RangeList([TestRangeList.MinMax(1,2), TestRangeList.MinMax(3,4), TestRangeList.MinMax(6,7)]) + s2 = RangeList([TestRangeList.MinMax(2, 3), TestRangeList.MinMax(6, 6)]) + self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1,1), TestRangeList.MinMax(4,4), TestRangeList.MinMax(7,7)])) + +if __name__ == '__main__': + unittest.main() diff --git a/rpki/gui/app/static/css/bootstrap.min.css b/rpki/gui/app/static/css/bootstrap.min.css new file mode 100644 index 00000000..c10c7f41 --- /dev/null +++ b/rpki/gui/app/static/css/bootstrap.min.css @@ -0,0 +1,9 @@ +/*! 
+ * Bootstrap v2.3.1 + * + * Copyright 2012 Twitter, Inc + * Licensed under the Apache License v2.0 + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Designed and built with all the love in the world @twitter by @mdo and @fat. + */.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;line-height:0;content:""}.clearfix:after{clear:both}.hide-text{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.input-block-level{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}audio:not([controls]){display:none}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}a:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}a:hover,a:active{outline:0}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{width:auto\9;height:auto;max-width:100%;vertical-align:middle;border:0;-ms-interpolation-mode:bicubic}#map_canvas img,.google-maps img{max-width:none}button,input,select,textarea{margin:0;font-size:100%;vertical-align:middle}button,input{*overflow:visible;line-height:normal}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}button,html input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button}label,select,button,input[type="button"],input[type="reset"],input[type="submit"],input[type="radio"],input[type="checkbox"]{cursor:pointer}input[type="search"]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type="search"]::-webkit-search-decoration,input[type="search"]::-webkit-search-cancel-button{-webkit-appearance:none}textarea{overflow:auto;vertical-align:top}@media print{*{color:#000!important;text-shadow:none!important;background:transparent!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}}body{margin:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:20px;color:#333;background-color:#fff}a{color:#08c;text-decoration:none}a:hover,a:focus{color:#005580;text-decoration:underline}.img-rounded{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.img-polaroid{padding:4px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.1);-moz-box-shadow:0 1px 3px rgba(0,0,0,0.1);box-shadow:0 1px 3px rgba(0,0,0,0.1)}.img-circle{-webkit-border-radius:500px;-moz-border-radius:500px;border-radius:500px}.row{margin-left:-20px;*zoom:1}.row:before,.row:after{display:table;line-height:0;content:""}.row:after{clear:both}[class*="span"]{float:left;min-height:1px;margin-left:20px}.container,.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom 
.container{width:940px}.span12{width:940px}.span11{width:860px}.span10{width:780px}.span9{width:700px}.span8{width:620px}.span7{width:540px}.span6{width:460px}.span5{width:380px}.span4{width:300px}.span3{width:220px}.span2{width:140px}.span1{width:60px}.offset12{margin-left:980px}.offset11{margin-left:900px}.offset10{margin-left:820px}.offset9{margin-left:740px}.offset8{margin-left:660px}.offset7{margin-left:580px}.offset6{margin-left:500px}.offset5{margin-left:420px}.offset4{margin-left:340px}.offset3{margin-left:260px}.offset2{margin-left:180px}.offset1{margin-left:100px}.row-fluid{width:100%;*zoom:1}.row-fluid:before,.row-fluid:after{display:table;line-height:0;content:""}.row-fluid:after{clear:both}.row-fluid [class*="span"]{display:block;float:left;width:100%;min-height:30px;margin-left:2.127659574468085%;*margin-left:2.074468085106383%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.row-fluid [class*="span"]:first-child{margin-left:0}.row-fluid .controls-row [class*="span"]+[class*="span"]{margin-left:2.127659574468085%}.row-fluid .span12{width:100%;*width:99.94680851063829%}.row-fluid .span11{width:91.48936170212765%;*width:91.43617021276594%}.row-fluid .span10{width:82.97872340425532%;*width:82.92553191489361%}.row-fluid .span9{width:74.46808510638297%;*width:74.41489361702126%}.row-fluid .span8{width:65.95744680851064%;*width:65.90425531914893%}.row-fluid .span7{width:57.44680851063829%;*width:57.39361702127659%}.row-fluid .span6{width:48.93617021276595%;*width:48.88297872340425%}.row-fluid .span5{width:40.42553191489362%;*width:40.37234042553192%}.row-fluid .span4{width:31.914893617021278%;*width:31.861702127659576%}.row-fluid .span3{width:23.404255319148934%;*width:23.351063829787233%}.row-fluid .span2{width:14.893617021276595%;*width:14.840425531914894%}.row-fluid .span1{width:6.382978723404255%;*width:6.329787234042553%}.row-fluid .offset12{margin-left:104.25531914893617%;*margin-left:104.14893617021275%}.row-fluid .offset12:first-child{margin-left:102.12765957446808%;*margin-left:102.02127659574467%}.row-fluid .offset11{margin-left:95.74468085106382%;*margin-left:95.6382978723404%}.row-fluid .offset11:first-child{margin-left:93.61702127659574%;*margin-left:93.51063829787232%}.row-fluid .offset10{margin-left:87.23404255319149%;*margin-left:87.12765957446807%}.row-fluid .offset10:first-child{margin-left:85.1063829787234%;*margin-left:84.99999999999999%}.row-fluid .offset9{margin-left:78.72340425531914%;*margin-left:78.61702127659572%}.row-fluid .offset9:first-child{margin-left:76.59574468085106%;*margin-left:76.48936170212764%}.row-fluid .offset8{margin-left:70.2127659574468%;*margin-left:70.10638297872339%}.row-fluid .offset8:first-child{margin-left:68.08510638297872%;*margin-left:67.9787234042553%}.row-fluid .offset7{margin-left:61.70212765957446%;*margin-left:61.59574468085106%}.row-fluid .offset7:first-child{margin-left:59.574468085106375%;*margin-left:59.46808510638297%}.row-fluid .offset6{margin-left:53.191489361702125%;*margin-left:53.085106382978715%}.row-fluid .offset6:first-child{margin-left:51.063829787234035%;*margin-left:50.95744680851063%}.row-fluid .offset5{margin-left:44.68085106382979%;*margin-left:44.57446808510638%}.row-fluid .offset5:first-child{margin-left:42.5531914893617%;*margin-left:42.4468085106383%}.row-fluid .offset4{margin-left:36.170212765957444%;*margin-left:36.06382978723405%}.row-fluid .offset4:first-child{margin-left:34.04255319148936%;*margin-left:33.93617021276596%}.row-fluid 
.offset3{margin-left:27.659574468085104%;*margin-left:27.5531914893617%}.row-fluid .offset3:first-child{margin-left:25.53191489361702%;*margin-left:25.425531914893618%}.row-fluid .offset2{margin-left:19.148936170212764%;*margin-left:19.04255319148936%}.row-fluid .offset2:first-child{margin-left:17.02127659574468%;*margin-left:16.914893617021278%}.row-fluid .offset1{margin-left:10.638297872340425%;*margin-left:10.53191489361702%}.row-fluid .offset1:first-child{margin-left:8.51063829787234%;*margin-left:8.404255319148938%}[class*="span"].hide,.row-fluid [class*="span"].hide{display:none}[class*="span"].pull-right,.row-fluid [class*="span"].pull-right{float:right}.container{margin-right:auto;margin-left:auto;*zoom:1}.container:before,.container:after{display:table;line-height:0;content:""}.container:after{clear:both}.container-fluid{padding-right:20px;padding-left:20px;*zoom:1}.container-fluid:before,.container-fluid:after{display:table;line-height:0;content:""}.container-fluid:after{clear:both}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:21px;font-weight:200;line-height:30px}small{font-size:85%}strong{font-weight:bold}em{font-style:italic}cite{font-style:normal}.muted{color:#999}a.muted:hover,a.muted:focus{color:#808080}.text-warning{color:#c09853}a.text-warning:hover,a.text-warning:focus{color:#a47e3c}.text-error{color:#b94a48}a.text-error:hover,a.text-error:focus{color:#953b39}.text-info{color:#3a87ad}a.text-info:hover,a.text-info:focus{color:#2d6987}.text-success{color:#468847}a.text-success:hover,a.text-success:focus{color:#356635}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}h1,h2,h3,h4,h5,h6{margin:10px 0;font-family:inherit;font-weight:bold;line-height:20px;color:inherit;text-rendering:optimizelegibility}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small{font-weight:normal;line-height:1;color:#999}h1,h2,h3{line-height:40px}h1{font-size:38.5px}h2{font-size:31.5px}h3{font-size:24.5px}h4{font-size:17.5px}h5{font-size:14px}h6{font-size:11.9px}h1 small{font-size:24.5px}h2 small{font-size:17.5px}h3 small{font-size:14px}h4 small{font-size:14px}.page-header{padding-bottom:9px;margin:20px 0 30px;border-bottom:1px solid #eee}ul,ol{padding:0;margin:0 0 10px 25px}ul ul,ul ol,ol ol,ol ul{margin-bottom:0}li{line-height:20px}ul.unstyled,ol.unstyled{margin-left:0;list-style:none}ul.inline,ol.inline{margin-left:0;list-style:none}ul.inline>li,ol.inline>li{display:inline-block;*display:inline;padding-right:5px;padding-left:5px;*zoom:1}dl{margin-bottom:20px}dt,dd{line-height:20px}dt{font-weight:bold}dd{margin-left:10px}.dl-horizontal{*zoom:1}.dl-horizontal:before,.dl-horizontal:after{display:table;line-height:0;content:""}.dl-horizontal:after{clear:both}.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}hr{margin:20px 0;border:0;border-top:1px solid #eee;border-bottom:1px solid #fff}abbr[title],abbr[data-original-title]{cursor:help;border-bottom:1px dotted #999}abbr.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:0 0 0 15px;margin:0 0 20px;border-left:5px solid #eee}blockquote p{margin-bottom:0;font-size:17.5px;font-weight:300;line-height:1.25}blockquote small{display:block;line-height:20px;color:#999}blockquote small:before{content:'\2014 \00A0'}blockquote.pull-right{float:right;padding-right:15px;padding-left:0;border-right:5px solid #eee;border-left:0}blockquote.pull-right p,blockquote.pull-right 
small{text-align:right}blockquote.pull-right small:before{content:''}blockquote.pull-right small:after{content:'\00A0 \2014'}q:before,q:after,blockquote:before,blockquote:after{content:""}address{display:block;margin-bottom:20px;font-style:normal;line-height:20px}code,pre{padding:0 3px 2px;font-family:Monaco,Menlo,Consolas,"Courier New",monospace;font-size:12px;color:#333;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}code{padding:2px 4px;color:#d14;white-space:nowrap;background-color:#f7f7f9;border:1px solid #e1e1e8}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:20px;word-break:break-all;word-wrap:break-word;white-space:pre;white-space:pre-wrap;background-color:#f5f5f5;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}pre.prettyprint{margin-bottom:20px}pre code{padding:0;color:inherit;white-space:pre;white-space:pre-wrap;background-color:transparent;border:0}.pre-scrollable{max-height:340px;overflow-y:scroll}form{margin:0 0 20px}fieldset{padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:40px;color:#333;border:0;border-bottom:1px solid #e5e5e5}legend small{font-size:15px;color:#999}label,input,button,select,textarea{font-size:14px;font-weight:normal;line-height:20px}input,button,select,textarea{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif}label{display:block;margin-bottom:5px}select,textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{display:inline-block;height:20px;padding:4px 6px;margin-bottom:10px;font-size:14px;line-height:20px;color:#555;vertical-align:middle;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}input,textarea,.uneditable-input{width:206px}textarea{height:auto}textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{background-color:#fff;border:1px solid #ccc;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border linear .2s,box-shadow linear .2s;-moz-transition:border linear .2s,box-shadow linear .2s;-o-transition:border linear .2s,box-shadow linear .2s;transition:border linear .2s,box-shadow linear .2s}textarea:focus,input[type="text"]:focus,input[type="password"]:focus,input[type="datetime"]:focus,input[type="datetime-local"]:focus,input[type="date"]:focus,input[type="month"]:focus,input[type="time"]:focus,input[type="week"]:focus,input[type="number"]:focus,input[type="email"]:focus,input[type="url"]:focus,input[type="search"]:focus,input[type="tel"]:focus,input[type="color"]:focus,.uneditable-input:focus{border-color:rgba(82,168,236,0.8);outline:0;outline:thin dotted \9;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px 
rgba(82,168,236,0.6)}input[type="radio"],input[type="checkbox"]{margin:4px 0 0;margin-top:1px \9;*margin-top:0;line-height:normal}input[type="file"],input[type="image"],input[type="submit"],input[type="reset"],input[type="button"],input[type="radio"],input[type="checkbox"]{width:auto}select,input[type="file"]{height:30px;*margin-top:4px;line-height:30px}select{width:220px;background-color:#fff;border:1px solid #ccc}select[multiple],select[size]{height:auto}select:focus,input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.uneditable-input,.uneditable-textarea{color:#999;cursor:not-allowed;background-color:#fcfcfc;border-color:#ccc;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.025);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.025);box-shadow:inset 0 1px 2px rgba(0,0,0,0.025)}.uneditable-input{overflow:hidden;white-space:nowrap}.uneditable-textarea{width:auto;height:auto}input:-moz-placeholder,textarea:-moz-placeholder{color:#999}input:-ms-input-placeholder,textarea:-ms-input-placeholder{color:#999}input::-webkit-input-placeholder,textarea::-webkit-input-placeholder{color:#999}.radio,.checkbox{min-height:20px;padding-left:20px}.radio input[type="radio"],.checkbox input[type="checkbox"]{float:left;margin-left:-20px}.controls>.radio:first-child,.controls>.checkbox:first-child{padding-top:5px}.radio.inline,.checkbox.inline{display:inline-block;padding-top:5px;margin-bottom:0;vertical-align:middle}.radio.inline+.radio.inline,.checkbox.inline+.checkbox.inline{margin-left:10px}.input-mini{width:60px}.input-small{width:90px}.input-medium{width:150px}.input-large{width:210px}.input-xlarge{width:270px}.input-xxlarge{width:530px}input[class*="span"],select[class*="span"],textarea[class*="span"],.uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"]{float:none;margin-left:0}.input-append input[class*="span"],.input-append .uneditable-input[class*="span"],.input-prepend input[class*="span"],.input-prepend .uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"],.row-fluid .input-prepend [class*="span"],.row-fluid .input-append [class*="span"]{display:inline-block}input,textarea,.uneditable-input{margin-left:0}.controls-row [class*="span"]+[class*="span"]{margin-left:20px}input.span12,textarea.span12,.uneditable-input.span12{width:926px}input.span11,textarea.span11,.uneditable-input.span11{width:846px}input.span10,textarea.span10,.uneditable-input.span10{width:766px}input.span9,textarea.span9,.uneditable-input.span9{width:686px}input.span8,textarea.span8,.uneditable-input.span8{width:606px}input.span7,textarea.span7,.uneditable-input.span7{width:526px}input.span6,textarea.span6,.uneditable-input.span6{width:446px}input.span5,textarea.span5,.uneditable-input.span5{width:366px}input.span4,textarea.span4,.uneditable-input.span4{width:286px}input.span3,textarea.span3,.uneditable-input.span3{width:206px}input.span2,textarea.span2,.uneditable-input.span2{width:126px}input.span1,textarea.span1,.uneditable-input.span1{width:46px}.controls-row{*zoom:1}.controls-row:before,.controls-row:after{display:table;line-height:0;content:""}.controls-row:after{clear:both}.controls-row [class*="span"],.row-fluid .controls-row [class*="span"]{float:left}.controls-row 
.checkbox[class*="span"],.controls-row .radio[class*="span"]{padding-top:5px}input[disabled],select[disabled],textarea[disabled],input[readonly],select[readonly],textarea[readonly]{cursor:not-allowed;background-color:#eee}input[type="radio"][disabled],input[type="checkbox"][disabled],input[type="radio"][readonly],input[type="checkbox"][readonly]{background-color:transparent}.control-group.warning .control-label,.control-group.warning .help-block,.control-group.warning .help-inline{color:#c09853}.control-group.warning .checkbox,.control-group.warning .radio,.control-group.warning input,.control-group.warning select,.control-group.warning textarea{color:#c09853}.control-group.warning input,.control-group.warning select,.control-group.warning textarea{border-color:#c09853;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.warning input:focus,.control-group.warning select:focus,.control-group.warning textarea:focus{border-color:#a47e3c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e}.control-group.warning .input-prepend .add-on,.control-group.warning .input-append .add-on{color:#c09853;background-color:#fcf8e3;border-color:#c09853}.control-group.error .control-label,.control-group.error .help-block,.control-group.error .help-inline{color:#b94a48}.control-group.error .checkbox,.control-group.error .radio,.control-group.error input,.control-group.error select,.control-group.error textarea{color:#b94a48}.control-group.error input,.control-group.error select,.control-group.error textarea{border-color:#b94a48;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.error input:focus,.control-group.error select:focus,.control-group.error textarea:focus{border-color:#953b39;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392}.control-group.error .input-prepend .add-on,.control-group.error .input-append .add-on{color:#b94a48;background-color:#f2dede;border-color:#b94a48}.control-group.success .control-label,.control-group.success .help-block,.control-group.success .help-inline{color:#468847}.control-group.success .checkbox,.control-group.success .radio,.control-group.success input,.control-group.success select,.control-group.success textarea{color:#468847}.control-group.success input,.control-group.success select,.control-group.success textarea{border-color:#468847;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.success input:focus,.control-group.success select:focus,.control-group.success textarea:focus{border-color:#356635;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b}.control-group.success .input-prepend .add-on,.control-group.success .input-append .add-on{color:#468847;background-color:#dff0d8;border-color:#468847}.control-group.info .control-label,.control-group.info .help-block,.control-group.info 
.help-inline{color:#3a87ad}.control-group.info .checkbox,.control-group.info .radio,.control-group.info input,.control-group.info select,.control-group.info textarea{color:#3a87ad}.control-group.info input,.control-group.info select,.control-group.info textarea{border-color:#3a87ad;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.info input:focus,.control-group.info select:focus,.control-group.info textarea:focus{border-color:#2d6987;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7ab5d3;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7ab5d3;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7ab5d3}.control-group.info .input-prepend .add-on,.control-group.info .input-append .add-on{color:#3a87ad;background-color:#d9edf7;border-color:#3a87ad}input:focus:invalid,textarea:focus:invalid,select:focus:invalid{color:#b94a48;border-color:#ee5f5b}input:focus:invalid:focus,textarea:focus:invalid:focus,select:focus:invalid:focus{border-color:#e9322d;-webkit-box-shadow:0 0 6px #f8b9b7;-moz-box-shadow:0 0 6px #f8b9b7;box-shadow:0 0 6px #f8b9b7}.form-actions{padding:19px 20px 20px;margin-top:20px;margin-bottom:20px;background-color:#f5f5f5;border-top:1px solid #e5e5e5;*zoom:1}.form-actions:before,.form-actions:after{display:table;line-height:0;content:""}.form-actions:after{clear:both}.help-block,.help-inline{color:#595959}.help-block{display:block;margin-bottom:10px}.help-inline{display:inline-block;*display:inline;padding-left:5px;vertical-align:middle;*zoom:1}.input-append,.input-prepend{display:inline-block;margin-bottom:10px;font-size:0;white-space:nowrap;vertical-align:middle}.input-append input,.input-prepend input,.input-append select,.input-prepend select,.input-append .uneditable-input,.input-prepend .uneditable-input,.input-append .dropdown-menu,.input-prepend .dropdown-menu,.input-append .popover,.input-prepend .popover{font-size:14px}.input-append input,.input-prepend input,.input-append select,.input-prepend select,.input-append .uneditable-input,.input-prepend .uneditable-input{position:relative;margin-bottom:0;*margin-left:0;vertical-align:top;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-append input:focus,.input-prepend input:focus,.input-append select:focus,.input-prepend select:focus,.input-append .uneditable-input:focus,.input-prepend .uneditable-input:focus{z-index:2}.input-append .add-on,.input-prepend .add-on{display:inline-block;width:auto;height:20px;min-width:16px;padding:4px 5px;font-size:14px;font-weight:normal;line-height:20px;text-align:center;text-shadow:0 1px 0 #fff;background-color:#eee;border:1px solid #ccc}.input-append .add-on,.input-prepend .add-on,.input-append .btn,.input-prepend .btn,.input-append .btn-group>.dropdown-toggle,.input-prepend .btn-group>.dropdown-toggle{vertical-align:top;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.input-append .active,.input-prepend .active{background-color:#a9dba9;border-color:#46a546}.input-prepend .add-on,.input-prepend .btn{margin-right:-1px}.input-prepend .add-on:first-child,.input-prepend .btn:first-child{-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.input-append input,.input-append select,.input-append .uneditable-input{-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.input-append input+.btn-group .btn:last-child,.input-append 
select+.btn-group .btn:last-child,.input-append .uneditable-input+.btn-group .btn:last-child{-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-append .add-on,.input-append .btn,.input-append .btn-group{margin-left:-1px}.input-append .add-on:last-child,.input-append .btn:last-child,.input-append .btn-group:last-child>.dropdown-toggle{-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-prepend.input-append input,.input-prepend.input-append select,.input-prepend.input-append .uneditable-input{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.input-prepend.input-append input+.btn-group .btn,.input-prepend.input-append select+.btn-group .btn,.input-prepend.input-append .uneditable-input+.btn-group .btn{-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-prepend.input-append .add-on:first-child,.input-prepend.input-append .btn:first-child{margin-right:-1px;-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.input-prepend.input-append .add-on:last-child,.input-prepend.input-append .btn:last-child{margin-left:-1px;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-prepend.input-append .btn-group:first-child{margin-left:0}input.search-query{padding-right:14px;padding-right:4px \9;padding-left:14px;padding-left:4px \9;margin-bottom:0;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.form-search .input-append .search-query,.form-search .input-prepend .search-query{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.form-search .input-append .search-query{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px}.form-search .input-append .btn{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0}.form-search .input-prepend .search-query{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0}.form-search .input-prepend .btn{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px}.form-search input,.form-inline input,.form-horizontal input,.form-search textarea,.form-inline textarea,.form-horizontal textarea,.form-search select,.form-inline select,.form-horizontal select,.form-search .help-inline,.form-inline .help-inline,.form-horizontal .help-inline,.form-search .uneditable-input,.form-inline .uneditable-input,.form-horizontal .uneditable-input,.form-search .input-prepend,.form-inline .input-prepend,.form-horizontal .input-prepend,.form-search .input-append,.form-inline .input-append,.form-horizontal .input-append{display:inline-block;*display:inline;margin-bottom:0;vertical-align:middle;*zoom:1}.form-search .hide,.form-inline .hide,.form-horizontal .hide{display:none}.form-search label,.form-inline label,.form-search .btn-group,.form-inline .btn-group{display:inline-block}.form-search .input-append,.form-inline .input-append,.form-search .input-prepend,.form-inline .input-prepend{margin-bottom:0}.form-search .radio,.form-search .checkbox,.form-inline .radio,.form-inline .checkbox{padding-left:0;margin-bottom:0;vertical-align:middle}.form-search .radio input[type="radio"],.form-search .checkbox input[type="checkbox"],.form-inline .radio input[type="radio"],.form-inline .checkbox 
input[type="checkbox"]{float:left;margin-right:3px;margin-left:0}.control-group{margin-bottom:10px}legend+.control-group{margin-top:20px;-webkit-margin-top-collapse:separate}.form-horizontal .control-group{margin-bottom:20px;*zoom:1}.form-horizontal .control-group:before,.form-horizontal .control-group:after{display:table;line-height:0;content:""}.form-horizontal .control-group:after{clear:both}.form-horizontal .control-label{float:left;width:160px;padding-top:5px;text-align:right}.form-horizontal .controls{*display:inline-block;*padding-left:20px;margin-left:180px;*margin-left:0}.form-horizontal .controls:first-child{*padding-left:180px}.form-horizontal .help-block{margin-bottom:0}.form-horizontal input+.help-block,.form-horizontal select+.help-block,.form-horizontal textarea+.help-block,.form-horizontal .uneditable-input+.help-block,.form-horizontal .input-prepend+.help-block,.form-horizontal .input-append+.help-block{margin-top:10px}.form-horizontal .form-actions{padding-left:180px}table{max-width:100%;background-color:transparent;border-collapse:collapse;border-spacing:0}.table{width:100%;margin-bottom:20px}.table th,.table td{padding:8px;line-height:20px;text-align:left;vertical-align:top;border-top:1px solid #ddd}.table th{font-weight:bold}.table thead th{vertical-align:bottom}.table caption+thead tr:first-child th,.table caption+thead tr:first-child td,.table colgroup+thead tr:first-child th,.table colgroup+thead tr:first-child td,.table thead:first-child tr:first-child th,.table thead:first-child tr:first-child td{border-top:0}.table tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed th,.table-condensed td{padding:4px 5px}.table-bordered{border:1px solid #ddd;border-collapse:separate;*border-collapse:collapse;border-left:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.table-bordered th,.table-bordered td{border-left:1px solid #ddd}.table-bordered caption+thead tr:first-child th,.table-bordered caption+tbody tr:first-child th,.table-bordered caption+tbody tr:first-child td,.table-bordered colgroup+thead tr:first-child th,.table-bordered colgroup+tbody tr:first-child th,.table-bordered colgroup+tbody tr:first-child td,.table-bordered thead:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child td{border-top:0}.table-bordered thead:first-child tr:first-child>th:first-child,.table-bordered tbody:first-child tr:first-child>td:first-child,.table-bordered tbody:first-child tr:first-child>th:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px}.table-bordered thead:first-child tr:first-child>th:last-child,.table-bordered tbody:first-child tr:first-child>td:last-child,.table-bordered tbody:first-child tr:first-child>th:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px}.table-bordered thead:last-child tr:last-child>th:first-child,.table-bordered tbody:last-child tr:last-child>td:first-child,.table-bordered tbody:last-child tr:last-child>th:first-child,.table-bordered tfoot:last-child tr:last-child>td:first-child,.table-bordered tfoot:last-child tr:last-child>th:first-child{-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px}.table-bordered thead:last-child tr:last-child>th:last-child,.table-bordered tbody:last-child tr:last-child>td:last-child,.table-bordered tbody:last-child 
tr:last-child>th:last-child,.table-bordered tfoot:last-child tr:last-child>td:last-child,.table-bordered tfoot:last-child tr:last-child>th:last-child{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-bottomright:4px}.table-bordered tfoot+tbody:last-child tr:last-child td:first-child{-webkit-border-bottom-left-radius:0;border-bottom-left-radius:0;-moz-border-radius-bottomleft:0}.table-bordered tfoot+tbody:last-child tr:last-child td:last-child{-webkit-border-bottom-right-radius:0;border-bottom-right-radius:0;-moz-border-radius-bottomright:0}.table-bordered caption+thead tr:first-child th:first-child,.table-bordered caption+tbody tr:first-child td:first-child,.table-bordered colgroup+thead tr:first-child th:first-child,.table-bordered colgroup+tbody tr:first-child td:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px}.table-bordered caption+thead tr:first-child th:last-child,.table-bordered caption+tbody tr:first-child td:last-child,.table-bordered colgroup+thead tr:first-child th:last-child,.table-bordered colgroup+tbody tr:first-child td:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px}.table-striped tbody>tr:nth-child(odd)>td,.table-striped tbody>tr:nth-child(odd)>th{background-color:#f9f9f9}.table-hover tbody tr:hover>td,.table-hover tbody tr:hover>th{background-color:#f5f5f5}table td[class*="span"],table th[class*="span"],.row-fluid table td[class*="span"],.row-fluid table th[class*="span"]{display:table-cell;float:none;margin-left:0}.table td.span1,.table th.span1{float:none;width:44px;margin-left:0}.table td.span2,.table th.span2{float:none;width:124px;margin-left:0}.table td.span3,.table th.span3{float:none;width:204px;margin-left:0}.table td.span4,.table th.span4{float:none;width:284px;margin-left:0}.table td.span5,.table th.span5{float:none;width:364px;margin-left:0}.table td.span6,.table th.span6{float:none;width:444px;margin-left:0}.table td.span7,.table th.span7{float:none;width:524px;margin-left:0}.table td.span8,.table th.span8{float:none;width:604px;margin-left:0}.table td.span9,.table th.span9{float:none;width:684px;margin-left:0}.table td.span10,.table th.span10{float:none;width:764px;margin-left:0}.table td.span11,.table th.span11{float:none;width:844px;margin-left:0}.table td.span12,.table th.span12{float:none;width:924px;margin-left:0}.table tbody tr.success>td{background-color:#dff0d8}.table tbody tr.error>td{background-color:#f2dede}.table tbody tr.warning>td{background-color:#fcf8e3}.table tbody tr.info>td{background-color:#d9edf7}.table-hover tbody tr.success:hover>td{background-color:#d0e9c6}.table-hover tbody tr.error:hover>td{background-color:#ebcccc}.table-hover tbody tr.warning:hover>td{background-color:#faf2cc}.table-hover tbody tr.info:hover>td{background-color:#c4e3f3}[class^="icon-"],[class*=" icon-"]{display:inline-block;width:14px;height:14px;margin-top:1px;*margin-right:.3em;line-height:14px;vertical-align:text-top;background-image:url("../img/glyphicons-halflings.png");background-position:14px 14px;background-repeat:no-repeat}.icon-white,.nav-pills>.active>a>[class^="icon-"],.nav-pills>.active>a>[class*=" icon-"],.nav-list>.active>a>[class^="icon-"],.nav-list>.active>a>[class*=" icon-"],.navbar-inverse .nav>.active>a>[class^="icon-"],.navbar-inverse .nav>.active>a>[class*=" icon-"],.dropdown-menu>li>a:hover>[class^="icon-"],.dropdown-menu>li>a:focus>[class^="icon-"],.dropdown-menu>li>a:hover>[class*=" 
icon-"],.dropdown-menu>li>a:focus>[class*=" icon-"],.dropdown-menu>.active>a>[class^="icon-"],.dropdown-menu>.active>a>[class*=" icon-"],.dropdown-submenu:hover>a>[class^="icon-"],.dropdown-submenu:focus>a>[class^="icon-"],.dropdown-submenu:hover>a>[class*=" icon-"],.dropdown-submenu:focus>a>[class*=" icon-"]{background-image:url("../img/glyphicons-halflings-white.png")}.icon-glass{background-position:0 0}.icon-music{background-position:-24px 0}.icon-search{background-position:-48px 0}.icon-envelope{background-position:-72px 0}.icon-heart{background-position:-96px 0}.icon-star{background-position:-120px 0}.icon-star-empty{background-position:-144px 0}.icon-user{background-position:-168px 0}.icon-film{background-position:-192px 0}.icon-th-large{background-position:-216px 0}.icon-th{background-position:-240px 0}.icon-th-list{background-position:-264px 0}.icon-ok{background-position:-288px 0}.icon-remove{background-position:-312px 0}.icon-zoom-in{background-position:-336px 0}.icon-zoom-out{background-position:-360px 0}.icon-off{background-position:-384px 0}.icon-signal{background-position:-408px 0}.icon-cog{background-position:-432px 0}.icon-trash{background-position:-456px 0}.icon-home{background-position:0 -24px}.icon-file{background-position:-24px -24px}.icon-time{background-position:-48px -24px}.icon-road{background-position:-72px -24px}.icon-download-alt{background-position:-96px -24px}.icon-download{background-position:-120px -24px}.icon-upload{background-position:-144px -24px}.icon-inbox{background-position:-168px -24px}.icon-play-circle{background-position:-192px -24px}.icon-repeat{background-position:-216px -24px}.icon-refresh{background-position:-240px -24px}.icon-list-alt{background-position:-264px -24px}.icon-lock{background-position:-287px -24px}.icon-flag{background-position:-312px -24px}.icon-headphones{background-position:-336px -24px}.icon-volume-off{background-position:-360px -24px}.icon-volume-down{background-position:-384px -24px}.icon-volume-up{background-position:-408px -24px}.icon-qrcode{background-position:-432px -24px}.icon-barcode{background-position:-456px -24px}.icon-tag{background-position:0 -48px}.icon-tags{background-position:-25px -48px}.icon-book{background-position:-48px -48px}.icon-bookmark{background-position:-72px -48px}.icon-print{background-position:-96px -48px}.icon-camera{background-position:-120px -48px}.icon-font{background-position:-144px -48px}.icon-bold{background-position:-167px -48px}.icon-italic{background-position:-192px -48px}.icon-text-height{background-position:-216px -48px}.icon-text-width{background-position:-240px -48px}.icon-align-left{background-position:-264px -48px}.icon-align-center{background-position:-288px -48px}.icon-align-right{background-position:-312px -48px}.icon-align-justify{background-position:-336px -48px}.icon-list{background-position:-360px -48px}.icon-indent-left{background-position:-384px -48px}.icon-indent-right{background-position:-408px -48px}.icon-facetime-video{background-position:-432px -48px}.icon-picture{background-position:-456px -48px}.icon-pencil{background-position:0 -72px}.icon-map-marker{background-position:-24px -72px}.icon-adjust{background-position:-48px -72px}.icon-tint{background-position:-72px -72px}.icon-edit{background-position:-96px -72px}.icon-share{background-position:-120px -72px}.icon-check{background-position:-144px -72px}.icon-move{background-position:-168px -72px}.icon-step-backward{background-position:-192px -72px}.icon-fast-backward{background-position:-216px 
-72px}.icon-backward{background-position:-240px -72px}.icon-play{background-position:-264px -72px}.icon-pause{background-position:-288px -72px}.icon-stop{background-position:-312px -72px}.icon-forward{background-position:-336px -72px}.icon-fast-forward{background-position:-360px -72px}.icon-step-forward{background-position:-384px -72px}.icon-eject{background-position:-408px -72px}.icon-chevron-left{background-position:-432px -72px}.icon-chevron-right{background-position:-456px -72px}.icon-plus-sign{background-position:0 -96px}.icon-minus-sign{background-position:-24px -96px}.icon-remove-sign{background-position:-48px -96px}.icon-ok-sign{background-position:-72px -96px}.icon-question-sign{background-position:-96px -96px}.icon-info-sign{background-position:-120px -96px}.icon-screenshot{background-position:-144px -96px}.icon-remove-circle{background-position:-168px -96px}.icon-ok-circle{background-position:-192px -96px}.icon-ban-circle{background-position:-216px -96px}.icon-arrow-left{background-position:-240px -96px}.icon-arrow-right{background-position:-264px -96px}.icon-arrow-up{background-position:-289px -96px}.icon-arrow-down{background-position:-312px -96px}.icon-share-alt{background-position:-336px -96px}.icon-resize-full{background-position:-360px -96px}.icon-resize-small{background-position:-384px -96px}.icon-plus{background-position:-408px -96px}.icon-minus{background-position:-433px -96px}.icon-asterisk{background-position:-456px -96px}.icon-exclamation-sign{background-position:0 -120px}.icon-gift{background-position:-24px -120px}.icon-leaf{background-position:-48px -120px}.icon-fire{background-position:-72px -120px}.icon-eye-open{background-position:-96px -120px}.icon-eye-close{background-position:-120px -120px}.icon-warning-sign{background-position:-144px -120px}.icon-plane{background-position:-168px -120px}.icon-calendar{background-position:-192px -120px}.icon-random{width:16px;background-position:-216px -120px}.icon-comment{background-position:-240px -120px}.icon-magnet{background-position:-264px -120px}.icon-chevron-up{background-position:-288px -120px}.icon-chevron-down{background-position:-313px -119px}.icon-retweet{background-position:-336px -120px}.icon-shopping-cart{background-position:-360px -120px}.icon-folder-close{width:16px;background-position:-384px -120px}.icon-folder-open{width:16px;background-position:-408px -120px}.icon-resize-vertical{background-position:-432px -119px}.icon-resize-horizontal{background-position:-456px -118px}.icon-hdd{background-position:0 -144px}.icon-bullhorn{background-position:-24px -144px}.icon-bell{background-position:-48px -144px}.icon-certificate{background-position:-72px -144px}.icon-thumbs-up{background-position:-96px -144px}.icon-thumbs-down{background-position:-120px -144px}.icon-hand-right{background-position:-144px -144px}.icon-hand-left{background-position:-168px -144px}.icon-hand-up{background-position:-192px -144px}.icon-hand-down{background-position:-216px -144px}.icon-circle-arrow-right{background-position:-240px -144px}.icon-circle-arrow-left{background-position:-264px -144px}.icon-circle-arrow-up{background-position:-288px -144px}.icon-circle-arrow-down{background-position:-312px -144px}.icon-globe{background-position:-336px -144px}.icon-wrench{background-position:-360px -144px}.icon-tasks{background-position:-384px -144px}.icon-filter{background-position:-408px -144px}.icon-briefcase{background-position:-432px -144px}.icon-fullscreen{background-position:-456px 
-144px}.dropup,.dropdown{position:relative}.dropdown-toggle{*margin-bottom:-3px}.dropdown-toggle:active,.open .dropdown-toggle{outline:0}.caret{display:inline-block;width:0;height:0;vertical-align:top;border-top:4px solid #000;border-right:4px solid transparent;border-left:4px solid transparent;content:""}.dropdown .caret{margin-top:8px;margin-left:2px}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);*border-right-width:2px;*border-bottom-width:2px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #fff}.dropdown-menu>li>a{display:block;padding:3px 20px;clear:both;font-weight:normal;line-height:20px;color:#333;white-space:nowrap}.dropdown-menu>li>a:hover,.dropdown-menu>li>a:focus,.dropdown-submenu:hover>a,.dropdown-submenu:focus>a{color:#fff;text-decoration:none;background-color:#0081c2;background-image:-moz-linear-gradient(top,#08c,#0077b3);background-image:-webkit-gradient(linear,0 0,0 100%,from(#08c),to(#0077b3));background-image:-webkit-linear-gradient(top,#08c,#0077b3);background-image:-o-linear-gradient(top,#08c,#0077b3);background-image:linear-gradient(to bottom,#08c,#0077b3);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0077b3',GradientType=0)}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{color:#fff;text-decoration:none;background-color:#0081c2;background-image:-moz-linear-gradient(top,#08c,#0077b3);background-image:-webkit-gradient(linear,0 0,0 100%,from(#08c),to(#0077b3));background-image:-webkit-linear-gradient(top,#08c,#0077b3);background-image:-o-linear-gradient(top,#08c,#0077b3);background-image:linear-gradient(to bottom,#08c,#0077b3);background-repeat:repeat-x;outline:0;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0077b3',GradientType=0)}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{color:#999}.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{text-decoration:none;cursor:default;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open{*z-index:1000}.open>.dropdown-menu{display:block}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px solid #000;content:""}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:1px}.dropdown-submenu{position:relative}.dropdown-submenu>.dropdown-menu{top:0;left:100%;margin-top:-6px;margin-left:-1px;-webkit-border-radius:0 6px 6px 6px;-moz-border-radius:0 6px 6px 6px;border-radius:0 6px 6px 6px}.dropdown-submenu:hover>.dropdown-menu{display:block}.dropup .dropdown-submenu>.dropdown-menu{top:auto;bottom:0;margin-top:0;margin-bottom:-2px;-webkit-border-radius:5px 5px 5px 0;-moz-border-radius:5px 5px 5px 0;border-radius:5px 5px 5px 
0}.dropdown-submenu>a:after{display:block;float:right;width:0;height:0;margin-top:5px;margin-right:-10px;border-color:transparent;border-left-color:#ccc;border-style:solid;border-width:5px 0 5px 5px;content:" "}.dropdown-submenu:hover>a:after{border-left-color:#fff}.dropdown-submenu.pull-left{float:none}.dropdown-submenu.pull-left>.dropdown-menu{left:-100%;margin-left:10px;-webkit-border-radius:6px 0 6px 6px;-moz-border-radius:6px 0 6px 6px;border-radius:6px 0 6px 6px}.dropdown .dropdown-menu .nav-header{padding-right:20px;padding-left:20px}.typeahead{z-index:1051;margin-top:2px;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);box-shadow:inset 0 1px 1px rgba(0,0,0,0.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,0.15)}.well-large{padding:24px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.well-small{padding:9px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.fade{opacity:0;-webkit-transition:opacity .15s linear;-moz-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{position:relative;height:0;overflow:hidden;-webkit-transition:height .35s ease;-moz-transition:height .35s ease;-o-transition:height .35s ease;transition:height .35s ease}.collapse.in{height:auto}.close{float:right;font-size:20px;font-weight:bold;line-height:20px;color:#000;text-shadow:0 1px 0 #fff;opacity:.2;filter:alpha(opacity=20)}.close:hover,.close:focus{color:#000;text-decoration:none;cursor:pointer;opacity:.4;filter:alpha(opacity=40)}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none}.btn{display:inline-block;*display:inline;padding:4px 12px;margin-bottom:0;*margin-left:.3em;font-size:14px;line-height:20px;color:#333;text-align:center;text-shadow:0 1px 1px rgba(255,255,255,0.75);vertical-align:middle;cursor:pointer;background-color:#f5f5f5;*background-color:#e6e6e6;background-image:-moz-linear-gradient(top,#fff,#e6e6e6);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#e6e6e6));background-image:-webkit-linear-gradient(top,#fff,#e6e6e6);background-image:-o-linear-gradient(top,#fff,#e6e6e6);background-image:linear-gradient(to bottom,#fff,#e6e6e6);background-repeat:repeat-x;border:1px solid #ccc;*border:0;border-color:#e6e6e6 #e6e6e6 #bfbfbf;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);border-bottom-color:#b3b3b3;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff',endColorstr='#ffe6e6e6',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);*zoom:1;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05)}.btn:hover,.btn:focus,.btn:active,.btn.active,.btn.disabled,.btn[disabled]{color:#333;background-color:#e6e6e6;*background-color:#d9d9d9}.btn:active,.btn.active{background-color:#ccc \9}.btn:first-child{*margin-left:0}.btn:hover,.btn:focus{color:#333;text-decoration:none;background-position:0 -15px;-webkit-transition:background-position .1s 
linear;-moz-transition:background-position .1s linear;-o-transition:background-position .1s linear;transition:background-position .1s linear}.btn:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn.active,.btn:active{background-image:none;outline:0;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05)}.btn.disabled,.btn[disabled]{cursor:default;background-image:none;opacity:.65;filter:alpha(opacity=65);-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.btn-large{padding:11px 19px;font-size:17.5px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.btn-large [class^="icon-"],.btn-large [class*=" icon-"]{margin-top:4px}.btn-small{padding:2px 10px;font-size:11.9px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.btn-small [class^="icon-"],.btn-small [class*=" icon-"]{margin-top:0}.btn-mini [class^="icon-"],.btn-mini [class*=" icon-"]{margin-top:-1px}.btn-mini{padding:0 6px;font-size:10.5px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.btn-block{display:block;width:100%;padding-right:0;padding-left:0;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.btn-block+.btn-block{margin-top:5px}input[type="submit"].btn-block,input[type="reset"].btn-block,input[type="button"].btn-block{width:100%}.btn-primary.active,.btn-warning.active,.btn-danger.active,.btn-success.active,.btn-info.active,.btn-inverse.active{color:rgba(255,255,255,0.75)}.btn-primary{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#006dcc;*background-color:#04c;background-image:-moz-linear-gradient(top,#08c,#04c);background-image:-webkit-gradient(linear,0 0,0 100%,from(#08c),to(#04c));background-image:-webkit-linear-gradient(top,#08c,#04c);background-image:-o-linear-gradient(top,#08c,#04c);background-image:linear-gradient(to bottom,#08c,#04c);background-repeat:repeat-x;border-color:#04c #04c #002a80;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0044cc',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-primary:hover,.btn-primary:focus,.btn-primary:active,.btn-primary.active,.btn-primary.disabled,.btn-primary[disabled]{color:#fff;background-color:#04c;*background-color:#003bb3}.btn-primary:active,.btn-primary.active{background-color:#039 \9}.btn-warning{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#faa732;*background-color:#f89406;background-image:-moz-linear-gradient(top,#fbb450,#f89406);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fbb450),to(#f89406));background-image:-webkit-linear-gradient(top,#fbb450,#f89406);background-image:-o-linear-gradient(top,#fbb450,#f89406);background-image:linear-gradient(to bottom,#fbb450,#f89406);background-repeat:repeat-x;border-color:#f89406 #f89406 #ad6704;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450',endColorstr='#fff89406',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-warning:hover,.btn-warning:focus,.btn-warning:active,.btn-warning.active,.btn-warning.disabled,.btn-warning[disabled]{color:#fff;background-color:#f89406;*background-color:#df8505}.btn-warning:active,.btn-warning.active{background-color:#c67605 \9}.btn-danger{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#da4f49;*background-color:#bd362f;background-image:-moz-linear-gradient(top,#ee5f5b,#bd362f);background-image:-webkit-gradient(linear,0 0,0 100%,from(#ee5f5b),to(#bd362f));background-image:-webkit-linear-gradient(top,#ee5f5b,#bd362f);background-image:-o-linear-gradient(top,#ee5f5b,#bd362f);background-image:linear-gradient(to bottom,#ee5f5b,#bd362f);background-repeat:repeat-x;border-color:#bd362f #bd362f #802420;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffee5f5b',endColorstr='#ffbd362f',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-danger:hover,.btn-danger:focus,.btn-danger:active,.btn-danger.active,.btn-danger.disabled,.btn-danger[disabled]{color:#fff;background-color:#bd362f;*background-color:#a9302a}.btn-danger:active,.btn-danger.active{background-color:#942a25 \9}.btn-success{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#5bb75b;*background-color:#51a351;background-image:-moz-linear-gradient(top,#62c462,#51a351);background-image:-webkit-gradient(linear,0 0,0 100%,from(#62c462),to(#51a351));background-image:-webkit-linear-gradient(top,#62c462,#51a351);background-image:-o-linear-gradient(top,#62c462,#51a351);background-image:linear-gradient(to bottom,#62c462,#51a351);background-repeat:repeat-x;border-color:#51a351 #51a351 #387038;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff62c462',endColorstr='#ff51a351',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-success:hover,.btn-success:focus,.btn-success:active,.btn-success.active,.btn-success.disabled,.btn-success[disabled]{color:#fff;background-color:#51a351;*background-color:#499249}.btn-success:active,.btn-success.active{background-color:#408140 \9}.btn-info{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#49afcd;*background-color:#2f96b4;background-image:-moz-linear-gradient(top,#5bc0de,#2f96b4);background-image:-webkit-gradient(linear,0 0,0 100%,from(#5bc0de),to(#2f96b4));background-image:-webkit-linear-gradient(top,#5bc0de,#2f96b4);background-image:-o-linear-gradient(top,#5bc0de,#2f96b4);background-image:linear-gradient(to bottom,#5bc0de,#2f96b4);background-repeat:repeat-x;border-color:#2f96b4 #2f96b4 #1f6377;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de',endColorstr='#ff2f96b4',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-info:hover,.btn-info:focus,.btn-info:active,.btn-info.active,.btn-info.disabled,.btn-info[disabled]{color:#fff;background-color:#2f96b4;*background-color:#2a85a0}.btn-info:active,.btn-info.active{background-color:#24748c \9}.btn-inverse{color:#fff;text-shadow:0 -1px 0 
rgba(0,0,0,0.25);background-color:#363636;*background-color:#222;background-image:-moz-linear-gradient(top,#444,#222);background-image:-webkit-gradient(linear,0 0,0 100%,from(#444),to(#222));background-image:-webkit-linear-gradient(top,#444,#222);background-image:-o-linear-gradient(top,#444,#222);background-image:linear-gradient(to bottom,#444,#222);background-repeat:repeat-x;border-color:#222 #222 #000;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff444444',endColorstr='#ff222222',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-inverse:hover,.btn-inverse:focus,.btn-inverse:active,.btn-inverse.active,.btn-inverse.disabled,.btn-inverse[disabled]{color:#fff;background-color:#222;*background-color:#151515}.btn-inverse:active,.btn-inverse.active{background-color:#080808 \9}button.btn,input[type="submit"].btn{*padding-top:3px;*padding-bottom:3px}button.btn::-moz-focus-inner,input[type="submit"].btn::-moz-focus-inner{padding:0;border:0}button.btn.btn-large,input[type="submit"].btn.btn-large{*padding-top:7px;*padding-bottom:7px}button.btn.btn-small,input[type="submit"].btn.btn-small{*padding-top:3px;*padding-bottom:3px}button.btn.btn-mini,input[type="submit"].btn.btn-mini{*padding-top:1px;*padding-bottom:1px}.btn-link,.btn-link:active,.btn-link[disabled]{background-color:transparent;background-image:none;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.btn-link{color:#08c;cursor:pointer;border-color:transparent;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-link:hover,.btn-link:focus{color:#005580;text-decoration:underline;background-color:transparent}.btn-link[disabled]:hover,.btn-link[disabled]:focus{color:#333;text-decoration:none}.btn-group{position:relative;display:inline-block;*display:inline;*margin-left:.3em;font-size:0;white-space:nowrap;vertical-align:middle;*zoom:1}.btn-group:first-child{*margin-left:0}.btn-group+.btn-group{margin-left:5px}.btn-toolbar{margin-top:10px;margin-bottom:10px;font-size:0}.btn-toolbar>.btn+.btn,.btn-toolbar>.btn-group+.btn,.btn-toolbar>.btn+.btn-group{margin-left:5px}.btn-group>.btn{position:relative;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-group>.btn+.btn{margin-left:-1px}.btn-group>.btn,.btn-group>.dropdown-menu,.btn-group>.popover{font-size:14px}.btn-group>.btn-mini{font-size:10.5px}.btn-group>.btn-small{font-size:11.9px}.btn-group>.btn-large{font-size:17.5px}.btn-group>.btn:first-child{margin-left:0;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-bottomleft:4px;-moz-border-radius-topleft:4px}.btn-group>.btn:last-child,.btn-group>.dropdown-toggle{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-bottomright:4px}.btn-group>.btn.large:first-child{margin-left:0;-webkit-border-bottom-left-radius:6px;border-bottom-left-radius:6px;-webkit-border-top-left-radius:6px;border-top-left-radius:6px;-moz-border-radius-bottomleft:6px;-moz-border-radius-topleft:6px}.btn-group>.btn.large:last-child,.btn-group>.large.dropdown-toggle{-webkit-border-top-right-radius:6px;border-top-right-radius:6px;-webkit-border-bottom-right-radius:6px;border-bottom-right-radius:6px;-moz-border-radius-topright:6px;-moz-border-radius-bottomright:6px}.btn-group>.btn:hover,.btn-group>.btn:fo
cus,.btn-group>.btn:active,.btn-group>.btn.active{z-index:2}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{*padding-top:5px;padding-right:8px;*padding-bottom:5px;padding-left:8px;-webkit-box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05)}.btn-group>.btn-mini+.dropdown-toggle{*padding-top:2px;padding-right:5px;*padding-bottom:2px;padding-left:5px}.btn-group>.btn-small+.dropdown-toggle{*padding-top:5px;*padding-bottom:4px}.btn-group>.btn-large+.dropdown-toggle{*padding-top:7px;padding-right:12px;*padding-bottom:7px;padding-left:12px}.btn-group.open .dropdown-toggle{background-image:none;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05)}.btn-group.open .btn.dropdown-toggle{background-color:#e6e6e6}.btn-group.open .btn-primary.dropdown-toggle{background-color:#04c}.btn-group.open .btn-warning.dropdown-toggle{background-color:#f89406}.btn-group.open .btn-danger.dropdown-toggle{background-color:#bd362f}.btn-group.open .btn-success.dropdown-toggle{background-color:#51a351}.btn-group.open .btn-info.dropdown-toggle{background-color:#2f96b4}.btn-group.open .btn-inverse.dropdown-toggle{background-color:#222}.btn .caret{margin-top:8px;margin-left:0}.btn-large .caret{margin-top:6px}.btn-large .caret{border-top-width:5px;border-right-width:5px;border-left-width:5px}.btn-mini .caret,.btn-small .caret{margin-top:8px}.dropup .btn-large .caret{border-bottom-width:5px}.btn-primary .caret,.btn-warning .caret,.btn-danger .caret,.btn-info .caret,.btn-success .caret,.btn-inverse .caret{border-top-color:#fff;border-bottom-color:#fff}.btn-group-vertical{display:inline-block;*display:inline;*zoom:1}.btn-group-vertical>.btn{display:block;float:none;max-width:100%;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-group-vertical>.btn+.btn{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:first-child{-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0}.btn-group-vertical>.btn:last-child{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px}.btn-group-vertical>.btn-large:first-child{-webkit-border-radius:6px 6px 0 0;-moz-border-radius:6px 6px 0 0;border-radius:6px 6px 0 0}.btn-group-vertical>.btn-large:last-child{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px}.alert{padding:8px 35px 8px 14px;margin-bottom:20px;text-shadow:0 1px 0 rgba(255,255,255,0.5);background-color:#fcf8e3;border:1px solid #fbeed5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.alert,.alert h4{color:#c09853}.alert h4{margin:0}.alert .close{position:relative;top:-2px;right:-21px;line-height:20px}.alert-success{color:#468847;background-color:#dff0d8;border-color:#d6e9c6}.alert-success h4{color:#468847}.alert-danger,.alert-error{color:#b94a48;background-color:#f2dede;border-color:#eed3d7}.alert-danger h4,.alert-error h4{color:#b94a48}.alert-info{color:#3a87ad;background-color:#d9edf7;border-color:#bce8f1}.alert-info 
h4{color:#3a87ad}.alert-block{padding-top:14px;padding-bottom:14px}.alert-block>p,.alert-block>ul{margin-bottom:0}.alert-block p+p{margin-top:5px}.nav{margin-bottom:20px;margin-left:0;list-style:none}.nav>li>a{display:block}.nav>li>a:hover,.nav>li>a:focus{text-decoration:none;background-color:#eee}.nav>li>a>img{max-width:none}.nav>.pull-right{float:right}.nav-header{display:block;padding:3px 15px;font-size:11px;font-weight:bold;line-height:20px;color:#999;text-shadow:0 1px 0 rgba(255,255,255,0.5);text-transform:uppercase}.nav li+.nav-header{margin-top:9px}.nav-list{padding-right:15px;padding-left:15px;margin-bottom:0}.nav-list>li>a,.nav-list .nav-header{margin-right:-15px;margin-left:-15px;text-shadow:0 1px 0 rgba(255,255,255,0.5)}.nav-list>li>a{padding:3px 15px}.nav-list>.active>a,.nav-list>.active>a:hover,.nav-list>.active>a:focus{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.2);background-color:#08c}.nav-list [class^="icon-"],.nav-list [class*=" icon-"]{margin-right:2px}.nav-list .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #fff}.nav-tabs,.nav-pills{*zoom:1}.nav-tabs:before,.nav-pills:before,.nav-tabs:after,.nav-pills:after{display:table;line-height:0;content:""}.nav-tabs:after,.nav-pills:after{clear:both}.nav-tabs>li,.nav-pills>li{float:left}.nav-tabs>li>a,.nav-pills>li>a{padding-right:12px;padding-left:12px;margin-right:2px;line-height:14px}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{margin-bottom:-1px}.nav-tabs>li>a{padding-top:8px;padding-bottom:8px;line-height:20px;border:1px solid transparent;-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover,.nav-tabs>li>a:focus{border-color:#eee #eee #ddd}.nav-tabs>.active>a,.nav-tabs>.active>a:hover,.nav-tabs>.active>a:focus{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-pills>li>a{padding-top:8px;padding-bottom:8px;margin-top:2px;margin-bottom:2px;-webkit-border-radius:5px;-moz-border-radius:5px;border-radius:5px}.nav-pills>.active>a,.nav-pills>.active>a:hover,.nav-pills>.active>a:focus{color:#fff;background-color:#08c}.nav-stacked>li{float:none}.nav-stacked>li>a{margin-right:0}.nav-tabs.nav-stacked{border-bottom:0}.nav-tabs.nav-stacked>li>a{border:1px solid #ddd;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.nav-tabs.nav-stacked>li:first-child>a{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-topleft:4px}.nav-tabs.nav-stacked>li:last-child>a{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomright:4px;-moz-border-radius-bottomleft:4px}.nav-tabs.nav-stacked>li>a:hover,.nav-tabs.nav-stacked>li>a:focus{z-index:2;border-color:#ddd}.nav-pills.nav-stacked>li>a{margin-bottom:3px}.nav-pills.nav-stacked>li:last-child>a{margin-bottom:1px}.nav-tabs .dropdown-menu{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px}.nav-pills .dropdown-menu{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.nav .dropdown-toggle .caret{margin-top:6px;border-top-color:#08c;border-bottom-color:#08c}.nav .dropdown-toggle:hover .caret,.nav .dropdown-toggle:focus .caret{border-top-color:#005580;border-bottom-color:#005580}.nav-tabs .dropdown-toggle 
.caret{margin-top:8px}.nav .active .dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff}.nav-tabs .active .dropdown-toggle .caret{border-top-color:#555;border-bottom-color:#555}.nav>.dropdown.active>a:hover,.nav>.dropdown.active>a:focus{cursor:pointer}.nav-tabs .open .dropdown-toggle,.nav-pills .open .dropdown-toggle,.nav>li.dropdown.open.active>a:hover,.nav>li.dropdown.open.active>a:focus{color:#fff;background-color:#999;border-color:#999}.nav li.dropdown.open .caret,.nav li.dropdown.open.active .caret,.nav li.dropdown.open a:hover .caret,.nav li.dropdown.open a:focus .caret{border-top-color:#fff;border-bottom-color:#fff;opacity:1;filter:alpha(opacity=100)}.tabs-stacked .open>a:hover,.tabs-stacked .open>a:focus{border-color:#999}.tabbable{*zoom:1}.tabbable:before,.tabbable:after{display:table;line-height:0;content:""}.tabbable:after{clear:both}.tab-content{overflow:auto}.tabs-below>.nav-tabs,.tabs-right>.nav-tabs,.tabs-left>.nav-tabs{border-bottom:0}.tab-content>.tab-pane,.pill-content>.pill-pane{display:none}.tab-content>.active,.pill-content>.active{display:block}.tabs-below>.nav-tabs{border-top:1px solid #ddd}.tabs-below>.nav-tabs>li{margin-top:-1px;margin-bottom:0}.tabs-below>.nav-tabs>li>a{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px}.tabs-below>.nav-tabs>li>a:hover,.tabs-below>.nav-tabs>li>a:focus{border-top-color:#ddd;border-bottom-color:transparent}.tabs-below>.nav-tabs>.active>a,.tabs-below>.nav-tabs>.active>a:hover,.tabs-below>.nav-tabs>.active>a:focus{border-color:transparent #ddd #ddd #ddd}.tabs-left>.nav-tabs>li,.tabs-right>.nav-tabs>li{float:none}.tabs-left>.nav-tabs>li>a,.tabs-right>.nav-tabs>li>a{min-width:74px;margin-right:0;margin-bottom:3px}.tabs-left>.nav-tabs{float:left;margin-right:19px;border-right:1px solid #ddd}.tabs-left>.nav-tabs>li>a{margin-right:-1px;-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.tabs-left>.nav-tabs>li>a:hover,.tabs-left>.nav-tabs>li>a:focus{border-color:#eee #ddd #eee #eee}.tabs-left>.nav-tabs .active>a,.tabs-left>.nav-tabs .active>a:hover,.tabs-left>.nav-tabs .active>a:focus{border-color:#ddd transparent #ddd #ddd;*border-right-color:#fff}.tabs-right>.nav-tabs{float:right;margin-left:19px;border-left:1px solid #ddd}.tabs-right>.nav-tabs>li>a{margin-left:-1px;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.tabs-right>.nav-tabs>li>a:hover,.tabs-right>.nav-tabs>li>a:focus{border-color:#eee #eee #eee #ddd}.tabs-right>.nav-tabs .active>a,.tabs-right>.nav-tabs .active>a:hover,.tabs-right>.nav-tabs .active>a:focus{border-color:#ddd #ddd #ddd transparent;*border-left-color:#fff}.nav>.disabled>a{color:#999}.nav>.disabled>a:hover,.nav>.disabled>a:focus{text-decoration:none;cursor:default;background-color:transparent}.navbar{*position:relative;*z-index:2;margin-bottom:20px;overflow:visible}.navbar-inner{min-height:40px;padding-right:20px;padding-left:20px;background-color:#fafafa;background-image:-moz-linear-gradient(top,#fff,#f2f2f2);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#f2f2f2));background-image:-webkit-linear-gradient(top,#fff,#f2f2f2);background-image:-o-linear-gradient(top,#fff,#f2f2f2);background-image:linear-gradient(to bottom,#fff,#f2f2f2);background-repeat:repeat-x;border:1px solid 
#d4d4d4;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff',endColorstr='#fff2f2f2',GradientType=0);*zoom:1;-webkit-box-shadow:0 1px 4px rgba(0,0,0,0.065);-moz-box-shadow:0 1px 4px rgba(0,0,0,0.065);box-shadow:0 1px 4px rgba(0,0,0,0.065)}.navbar-inner:before,.navbar-inner:after{display:table;line-height:0;content:""}.navbar-inner:after{clear:both}.navbar .container{width:auto}.nav-collapse.collapse{height:auto;overflow:visible}.navbar .brand{display:block;float:left;padding:10px 20px 10px;margin-left:-20px;font-size:20px;font-weight:200;color:#777;text-shadow:0 1px 0 #fff}.navbar .brand:hover,.navbar .brand:focus{text-decoration:none}.navbar-text{margin-bottom:0;line-height:40px;color:#777}.navbar-link{color:#777}.navbar-link:hover,.navbar-link:focus{color:#333}.navbar .divider-vertical{height:40px;margin:0 9px;border-right:1px solid #fff;border-left:1px solid #f2f2f2}.navbar .btn,.navbar .btn-group{margin-top:5px}.navbar .btn-group .btn,.navbar .input-prepend .btn,.navbar .input-append .btn,.navbar .input-prepend .btn-group,.navbar .input-append .btn-group{margin-top:0}.navbar-form{margin-bottom:0;*zoom:1}.navbar-form:before,.navbar-form:after{display:table;line-height:0;content:""}.navbar-form:after{clear:both}.navbar-form input,.navbar-form select,.navbar-form .radio,.navbar-form .checkbox{margin-top:5px}.navbar-form input,.navbar-form select,.navbar-form .btn{display:inline-block;margin-bottom:0}.navbar-form input[type="image"],.navbar-form input[type="checkbox"],.navbar-form input[type="radio"]{margin-top:3px}.navbar-form .input-append,.navbar-form .input-prepend{margin-top:5px;white-space:nowrap}.navbar-form .input-append input,.navbar-form .input-prepend input{margin-top:0}.navbar-search{position:relative;float:left;margin-top:5px;margin-bottom:0}.navbar-search .search-query{padding:4px 14px;margin-bottom:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:13px;font-weight:normal;line-height:1;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.navbar-static-top{position:static;margin-bottom:0}.navbar-static-top .navbar-inner{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030;margin-bottom:0}.navbar-fixed-top .navbar-inner,.navbar-static-top .navbar-inner{border-width:0 0 1px}.navbar-fixed-bottom .navbar-inner{border-width:1px 0 0}.navbar-fixed-top .navbar-inner,.navbar-fixed-bottom .navbar-inner{padding-right:0;padding-left:0;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px}.navbar-fixed-top{top:0}.navbar-fixed-top .navbar-inner,.navbar-static-top .navbar-inner{-webkit-box-shadow:0 1px 10px rgba(0,0,0,0.1);-moz-box-shadow:0 1px 10px rgba(0,0,0,0.1);box-shadow:0 1px 10px rgba(0,0,0,0.1)}.navbar-fixed-bottom{bottom:0}.navbar-fixed-bottom .navbar-inner{-webkit-box-shadow:0 -1px 10px rgba(0,0,0,0.1);-moz-box-shadow:0 -1px 10px rgba(0,0,0,0.1);box-shadow:0 -1px 10px rgba(0,0,0,0.1)}.navbar .nav{position:relative;left:0;display:block;float:left;margin:0 10px 0 0}.navbar .nav.pull-right{float:right;margin-right:0}.navbar .nav>li{float:left}.navbar .nav>li>a{float:none;padding:10px 15px 10px;color:#777;text-decoration:none;text-shadow:0 1px 0 #fff}.navbar .nav .dropdown-toggle .caret{margin-top:8px}.navbar .nav>li>a:focus,.navbar 
.nav>li>a:hover{color:#333;text-decoration:none;background-color:transparent}.navbar .nav>.active>a,.navbar .nav>.active>a:hover,.navbar .nav>.active>a:focus{color:#555;text-decoration:none;background-color:#e5e5e5;-webkit-box-shadow:inset 0 3px 8px rgba(0,0,0,0.125);-moz-box-shadow:inset 0 3px 8px rgba(0,0,0,0.125);box-shadow:inset 0 3px 8px rgba(0,0,0,0.125)}.navbar .btn-navbar{display:none;float:right;padding:7px 10px;margin-right:5px;margin-left:5px;color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#ededed;*background-color:#e5e5e5;background-image:-moz-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:-webkit-gradient(linear,0 0,0 100%,from(#f2f2f2),to(#e5e5e5));background-image:-webkit-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:-o-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:linear-gradient(to bottom,#f2f2f2,#e5e5e5);background-repeat:repeat-x;border-color:#e5e5e5 #e5e5e5 #bfbfbf;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2f2f2',endColorstr='#ffe5e5e5',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075)}.navbar .btn-navbar:hover,.navbar .btn-navbar:focus,.navbar .btn-navbar:active,.navbar .btn-navbar.active,.navbar .btn-navbar.disabled,.navbar .btn-navbar[disabled]{color:#fff;background-color:#e5e5e5;*background-color:#d9d9d9}.navbar .btn-navbar:active,.navbar .btn-navbar.active{background-color:#ccc \9}.navbar .btn-navbar .icon-bar{display:block;width:18px;height:2px;background-color:#f5f5f5;-webkit-border-radius:1px;-moz-border-radius:1px;border-radius:1px;-webkit-box-shadow:0 1px 0 rgba(0,0,0,0.25);-moz-box-shadow:0 1px 0 rgba(0,0,0,0.25);box-shadow:0 1px 0 rgba(0,0,0,0.25)}.btn-navbar .icon-bar+.icon-bar{margin-top:3px}.navbar .nav>li>.dropdown-menu:before{position:absolute;top:-7px;left:9px;display:inline-block;border-right:7px solid transparent;border-bottom:7px solid #ccc;border-left:7px solid transparent;border-bottom-color:rgba(0,0,0,0.2);content:''}.navbar .nav>li>.dropdown-menu:after{position:absolute;top:-6px;left:10px;display:inline-block;border-right:6px solid transparent;border-bottom:6px solid #fff;border-left:6px solid transparent;content:''}.navbar-fixed-bottom .nav>li>.dropdown-menu:before{top:auto;bottom:-7px;border-top:7px solid #ccc;border-bottom:0;border-top-color:rgba(0,0,0,0.2)}.navbar-fixed-bottom .nav>li>.dropdown-menu:after{top:auto;bottom:-6px;border-top:6px solid #fff;border-bottom:0}.navbar .nav li.dropdown>a:hover .caret,.navbar .nav li.dropdown>a:focus .caret{border-top-color:#333;border-bottom-color:#333}.navbar .nav li.dropdown.open>.dropdown-toggle,.navbar .nav li.dropdown.active>.dropdown-toggle,.navbar .nav li.dropdown.open.active>.dropdown-toggle{color:#555;background-color:#e5e5e5}.navbar .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#777;border-bottom-color:#777}.navbar .nav li.dropdown.open>.dropdown-toggle .caret,.navbar .nav li.dropdown.active>.dropdown-toggle .caret,.navbar .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#555;border-bottom-color:#555}.navbar .pull-right>li>.dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right{right:0;left:auto}.navbar .pull-right>li>.dropdown-menu:before,.navbar 
.nav>li>.dropdown-menu.pull-right:before{right:12px;left:auto}.navbar .pull-right>li>.dropdown-menu:after,.navbar .nav>li>.dropdown-menu.pull-right:after{right:13px;left:auto}.navbar .pull-right>li>.dropdown-menu .dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right .dropdown-menu{right:100%;left:auto;margin-right:-1px;margin-left:0;-webkit-border-radius:6px 0 6px 6px;-moz-border-radius:6px 0 6px 6px;border-radius:6px 0 6px 6px}.navbar-inverse .navbar-inner{background-color:#1b1b1b;background-image:-moz-linear-gradient(top,#222,#111);background-image:-webkit-gradient(linear,0 0,0 100%,from(#222),to(#111));background-image:-webkit-linear-gradient(top,#222,#111);background-image:-o-linear-gradient(top,#222,#111);background-image:linear-gradient(to bottom,#222,#111);background-repeat:repeat-x;border-color:#252525;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff222222',endColorstr='#ff111111',GradientType=0)}.navbar-inverse .brand,.navbar-inverse .nav>li>a{color:#999;text-shadow:0 -1px 0 rgba(0,0,0,0.25)}.navbar-inverse .brand:hover,.navbar-inverse .nav>li>a:hover,.navbar-inverse .brand:focus,.navbar-inverse .nav>li>a:focus{color:#fff}.navbar-inverse .brand{color:#999}.navbar-inverse .navbar-text{color:#999}.navbar-inverse .nav>li>a:focus,.navbar-inverse .nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .nav .active>a,.navbar-inverse .nav .active>a:hover,.navbar-inverse .nav .active>a:focus{color:#fff;background-color:#111}.navbar-inverse .navbar-link{color:#999}.navbar-inverse .navbar-link:hover,.navbar-inverse .navbar-link:focus{color:#fff}.navbar-inverse .divider-vertical{border-right-color:#222;border-left-color:#111}.navbar-inverse .nav li.dropdown.open>.dropdown-toggle,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle{color:#fff;background-color:#111}.navbar-inverse .nav li.dropdown>a:hover .caret,.navbar-inverse .nav li.dropdown>a:focus .caret{border-top-color:#fff;border-bottom-color:#fff}.navbar-inverse .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#999;border-bottom-color:#999}.navbar-inverse .nav li.dropdown.open>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff}.navbar-inverse .navbar-search .search-query{color:#fff;background-color:#515151;border-color:#111;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);-webkit-transition:none;-moz-transition:none;-o-transition:none;transition:none}.navbar-inverse .navbar-search .search-query:-moz-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query:-ms-input-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query::-webkit-input-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query:focus,.navbar-inverse .navbar-search .search-query.focused{padding:5px 15px;color:#333;text-shadow:0 1px 0 #fff;background-color:#fff;border:0;outline:0;-webkit-box-shadow:0 0 3px rgba(0,0,0,0.15);-moz-box-shadow:0 0 3px rgba(0,0,0,0.15);box-shadow:0 0 3px rgba(0,0,0,0.15)}.navbar-inverse .btn-navbar{color:#fff;text-shadow:0 -1px 0 
rgba(0,0,0,0.25);background-color:#0e0e0e;*background-color:#040404;background-image:-moz-linear-gradient(top,#151515,#040404);background-image:-webkit-gradient(linear,0 0,0 100%,from(#151515),to(#040404));background-image:-webkit-linear-gradient(top,#151515,#040404);background-image:-o-linear-gradient(top,#151515,#040404);background-image:linear-gradient(to bottom,#151515,#040404);background-repeat:repeat-x;border-color:#040404 #040404 #000;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff151515',endColorstr='#ff040404',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.navbar-inverse .btn-navbar:hover,.navbar-inverse .btn-navbar:focus,.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active,.navbar-inverse .btn-navbar.disabled,.navbar-inverse .btn-navbar[disabled]{color:#fff;background-color:#040404;*background-color:#000}.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active{background-color:#000 \9}.breadcrumb{padding:8px 15px;margin:0 0 20px;list-style:none;background-color:#f5f5f5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.breadcrumb>li{display:inline-block;*display:inline;text-shadow:0 1px 0 #fff;*zoom:1}.breadcrumb>li>.divider{padding:0 5px;color:#ccc}.breadcrumb>.active{color:#999}.pagination{margin:20px 0}.pagination ul{display:inline-block;*display:inline;margin-bottom:0;margin-left:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;*zoom:1;-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:0 1px 2px rgba(0,0,0,0.05);box-shadow:0 1px 2px rgba(0,0,0,0.05)}.pagination ul>li{display:inline}.pagination ul>li>a,.pagination ul>li>span{float:left;padding:4px 12px;line-height:20px;text-decoration:none;background-color:#fff;border:1px solid #ddd;border-left-width:0}.pagination ul>li>a:hover,.pagination ul>li>a:focus,.pagination ul>.active>a,.pagination ul>.active>span{background-color:#f5f5f5}.pagination ul>.active>a,.pagination ul>.active>span{color:#999;cursor:default}.pagination ul>.disabled>span,.pagination ul>.disabled>a,.pagination ul>.disabled>a:hover,.pagination ul>.disabled>a:focus{color:#999;cursor:default;background-color:transparent}.pagination ul>li:first-child>a,.pagination ul>li:first-child>span{border-left-width:1px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-bottomleft:4px;-moz-border-radius-topleft:4px}.pagination ul>li:last-child>a,.pagination ul>li:last-child>span{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-bottomright:4px}.pagination-centered{text-align:center}.pagination-right{text-align:right}.pagination-large ul>li>a,.pagination-large ul>li>span{padding:11px 19px;font-size:17.5px}.pagination-large ul>li:first-child>a,.pagination-large ul>li:first-child>span{-webkit-border-bottom-left-radius:6px;border-bottom-left-radius:6px;-webkit-border-top-left-radius:6px;border-top-left-radius:6px;-moz-border-radius-bottomleft:6px;-moz-border-radius-topleft:6px}.pagination-large ul>li:last-child>a,.pagination-large 
ul>li:last-child>span{-webkit-border-top-right-radius:6px;border-top-right-radius:6px;-webkit-border-bottom-right-radius:6px;border-bottom-right-radius:6px;-moz-border-radius-topright:6px;-moz-border-radius-bottomright:6px}.pagination-mini ul>li:first-child>a,.pagination-small ul>li:first-child>a,.pagination-mini ul>li:first-child>span,.pagination-small ul>li:first-child>span{-webkit-border-bottom-left-radius:3px;border-bottom-left-radius:3px;-webkit-border-top-left-radius:3px;border-top-left-radius:3px;-moz-border-radius-bottomleft:3px;-moz-border-radius-topleft:3px}.pagination-mini ul>li:last-child>a,.pagination-small ul>li:last-child>a,.pagination-mini ul>li:last-child>span,.pagination-small ul>li:last-child>span{-webkit-border-top-right-radius:3px;border-top-right-radius:3px;-webkit-border-bottom-right-radius:3px;border-bottom-right-radius:3px;-moz-border-radius-topright:3px;-moz-border-radius-bottomright:3px}.pagination-small ul>li>a,.pagination-small ul>li>span{padding:2px 10px;font-size:11.9px}.pagination-mini ul>li>a,.pagination-mini ul>li>span{padding:0 6px;font-size:10.5px}.pager{margin:20px 0;text-align:center;list-style:none;*zoom:1}.pager:before,.pager:after{display:table;line-height:0;content:""}.pager:after{clear:both}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.pager li>a:hover,.pager li>a:focus{text-decoration:none;background-color:#f5f5f5}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager .disabled>a,.pager .disabled>a:hover,.pager .disabled>a:focus,.pager .disabled>span{color:#999;cursor:default;background-color:#fff}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop,.modal-backdrop.fade.in{opacity:.8;filter:alpha(opacity=80)}.modal{position:fixed;top:10%;left:50%;z-index:1050;width:560px;margin-left:-280px;background-color:#fff;border:1px solid #999;border:1px solid rgba(0,0,0,0.3);*border:1px solid #999;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;outline:0;-webkit-box-shadow:0 3px 7px rgba(0,0,0,0.3);-moz-box-shadow:0 3px 7px rgba(0,0,0,0.3);box-shadow:0 3px 7px rgba(0,0,0,0.3);-webkit-background-clip:padding-box;-moz-background-clip:padding-box;background-clip:padding-box}.modal.fade{top:-25%;-webkit-transition:opacity .3s linear,top .3s ease-out;-moz-transition:opacity .3s linear,top .3s ease-out;-o-transition:opacity .3s linear,top .3s ease-out;transition:opacity .3s linear,top .3s ease-out}.modal.fade.in{top:10%}.modal-header{padding:9px 15px;border-bottom:1px solid #eee}.modal-header .close{margin-top:2px}.modal-header h3{margin:0;line-height:30px}.modal-body{position:relative;max-height:400px;padding:15px;overflow-y:auto}.modal-form{margin-bottom:0}.modal-footer{padding:14px 15px 15px;margin-bottom:0;text-align:right;background-color:#f5f5f5;border-top:1px solid #ddd;-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px;*zoom:1;-webkit-box-shadow:inset 0 1px 0 #fff;-moz-box-shadow:inset 0 1px 0 #fff;box-shadow:inset 0 1px 0 #fff}.modal-footer:before,.modal-footer:after{display:table;line-height:0;content:""}.modal-footer:after{clear:both}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer 
.btn-block+.btn-block{margin-left:0}.tooltip{position:absolute;z-index:1030;display:block;font-size:11px;line-height:1.4;opacity:0;filter:alpha(opacity=0);visibility:visible}.tooltip.in{opacity:.8;filter:alpha(opacity=80)}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:8px;color:#fff;text-align:center;text-decoration:none;background-color:#000;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-top-color:#000;border-width:5px 5px 0}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-right-color:#000;border-width:5px 5px 5px 0}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-left-color:#000;border-width:5px 0 5px 5px}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-bottom-color:#000;border-width:0 5px 5px}.popover{position:absolute;top:0;left:0;z-index:1010;display:none;max-width:276px;padding:1px;text-align:left;white-space:normal;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 14px;margin:0;font-size:14px;font-weight:normal;line-height:18px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;-webkit-border-radius:5px 5px 0 0;-moz-border-radius:5px 5px 0 0;border-radius:5px 5px 0 0}.popover-title:empty{display:none}.popover-content{padding:9px 14px}.popover .arrow,.popover .arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover .arrow{border-width:11px}.popover .arrow:after{border-width:10px;content:""}.popover.top .arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,0.25);border-bottom-width:0}.popover.top .arrow:after{bottom:1px;margin-left:-10px;border-top-color:#fff;border-bottom-width:0}.popover.right .arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,0.25);border-left-width:0}.popover.right .arrow:after{bottom:-10px;left:1px;border-right-color:#fff;border-left-width:0}.popover.bottom .arrow{top:-11px;left:50%;margin-left:-11px;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,0.25);border-top-width:0}.popover.bottom .arrow:after{top:1px;margin-left:-10px;border-bottom-color:#fff;border-top-width:0}.popover.left .arrow{top:50%;right:-11px;margin-top:-11px;border-left-color:#999;border-left-color:rgba(0,0,0,0.25);border-right-width:0}.popover.left .arrow:after{right:1px;bottom:-10px;border-left-color:#fff;border-right-width:0}.thumbnails{margin-left:-20px;list-style:none;*zoom:1}.thumbnails:before,.thumbnails:after{display:table;line-height:0;content:""}.thumbnails:after{clear:both}.row-fluid .thumbnails{margin-left:0}.thumbnails>li{float:left;margin-bottom:20px;margin-left:20px}.thumbnail{display:block;padding:4px;line-height:20px;border:1px solid 
#ddd;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.055);-moz-box-shadow:0 1px 3px rgba(0,0,0,0.055);box-shadow:0 1px 3px rgba(0,0,0,0.055);-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}a.thumbnail:hover,a.thumbnail:focus{border-color:#08c;-webkit-box-shadow:0 1px 4px rgba(0,105,214,0.25);-moz-box-shadow:0 1px 4px rgba(0,105,214,0.25);box-shadow:0 1px 4px rgba(0,105,214,0.25)}.thumbnail>img{display:block;max-width:100%;margin-right:auto;margin-left:auto}.thumbnail .caption{padding:9px;color:#555}.media,.media-body{overflow:hidden;*overflow:visible;zoom:1}.media,.media .media{margin-top:15px}.media:first-child{margin-top:0}.media-object{display:block}.media-heading{margin:0 0 5px}.media>.pull-left{margin-right:10px}.media>.pull-right{margin-left:10px}.media-list{margin-left:0;list-style:none}.label,.badge{display:inline-block;padding:2px 4px;font-size:11.844px;font-weight:bold;line-height:14px;color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);white-space:nowrap;vertical-align:baseline;background-color:#999}.label{-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.badge{padding-right:9px;padding-left:9px;-webkit-border-radius:9px;-moz-border-radius:9px;border-radius:9px}.label:empty,.badge:empty{display:none}a.label:hover,a.label:focus,a.badge:hover,a.badge:focus{color:#fff;text-decoration:none;cursor:pointer}.label-important,.badge-important{background-color:#b94a48}.label-important[href],.badge-important[href]{background-color:#953b39}.label-warning,.badge-warning{background-color:#f89406}.label-warning[href],.badge-warning[href]{background-color:#c67605}.label-success,.badge-success{background-color:#468847}.label-success[href],.badge-success[href]{background-color:#356635}.label-info,.badge-info{background-color:#3a87ad}.label-info[href],.badge-info[href]{background-color:#2d6987}.label-inverse,.badge-inverse{background-color:#333}.label-inverse[href],.badge-inverse[href]{background-color:#1a1a1a}.btn .label,.btn .badge{position:relative;top:-1px}.btn-mini .label,.btn-mini .badge{top:0}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-moz-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-ms-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:0 0}to{background-position:40px 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f7f7f7;background-image:-moz-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:-webkit-gradient(linear,0 0,0 100%,from(#f5f5f5),to(#f9f9f9));background-image:-webkit-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:-o-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:linear-gradient(to bottom,#f5f5f5,#f9f9f9);background-repeat:repeat-x;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5',endColorstr='#fff9f9f9',GradientType=0);-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1)}.progress .bar{float:left;width:0;height:100%;font-size:12px;color:#fff;text-align:center;text-shadow:0 -1px 0 
rgba(0,0,0,0.25);background-color:#0e90d2;background-image:-moz-linear-gradient(top,#149bdf,#0480be);background-image:-webkit-gradient(linear,0 0,0 100%,from(#149bdf),to(#0480be));background-image:-webkit-linear-gradient(top,#149bdf,#0480be);background-image:-o-linear-gradient(top,#149bdf,#0480be);background-image:linear-gradient(to bottom,#149bdf,#0480be);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff149bdf',endColorstr='#ff0480be',GradientType=0);-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-moz-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-transition:width .6s ease;-moz-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress .bar+.bar{-webkit-box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15);-moz-box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15)}.progress-striped .bar{background-color:#149bdf;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;-moz-background-size:40px 40px;-o-background-size:40px 40px;background-size:40px 40px}.progress.active .bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-moz-animation:progress-bar-stripes 2s linear infinite;-ms-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-danger .bar,.progress .bar-danger{background-color:#dd514c;background-image:-moz-linear-gradient(top,#ee5f5b,#c43c35);background-image:-webkit-gradient(linear,0 0,0 100%,from(#ee5f5b),to(#c43c35));background-image:-webkit-linear-gradient(top,#ee5f5b,#c43c35);background-image:-o-linear-gradient(top,#ee5f5b,#c43c35);background-image:linear-gradient(to bottom,#ee5f5b,#c43c35);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffee5f5b',endColorstr='#ffc43c35',GradientType=0)}.progress-danger.progress-striped .bar,.progress-striped .bar-danger{background-color:#ee5f5b;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 
25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-success .bar,.progress .bar-success{background-color:#5eb95e;background-image:-moz-linear-gradient(top,#62c462,#57a957);background-image:-webkit-gradient(linear,0 0,0 100%,from(#62c462),to(#57a957));background-image:-webkit-linear-gradient(top,#62c462,#57a957);background-image:-o-linear-gradient(top,#62c462,#57a957);background-image:linear-gradient(to bottom,#62c462,#57a957);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff62c462',endColorstr='#ff57a957',GradientType=0)}.progress-success.progress-striped .bar,.progress-striped .bar-success{background-color:#62c462;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-info .bar,.progress .bar-info{background-color:#4bb1cf;background-image:-moz-linear-gradient(top,#5bc0de,#339bb9);background-image:-webkit-gradient(linear,0 0,0 100%,from(#5bc0de),to(#339bb9));background-image:-webkit-linear-gradient(top,#5bc0de,#339bb9);background-image:-o-linear-gradient(top,#5bc0de,#339bb9);background-image:linear-gradient(to bottom,#5bc0de,#339bb9);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de',endColorstr='#ff339bb9',GradientType=0)}.progress-info.progress-striped .bar,.progress-striped .bar-info{background-color:#5bc0de;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 
75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-warning .bar,.progress .bar-warning{background-color:#faa732;background-image:-moz-linear-gradient(top,#fbb450,#f89406);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fbb450),to(#f89406));background-image:-webkit-linear-gradient(top,#fbb450,#f89406);background-image:-o-linear-gradient(top,#fbb450,#f89406);background-image:linear-gradient(to bottom,#fbb450,#f89406);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450',endColorstr='#fff89406',GradientType=0)}.progress-warning.progress-striped .bar,.progress-striped .bar-warning{background-color:#fbb450;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.accordion{margin-bottom:20px}.accordion-group{margin-bottom:2px;border:1px solid #e5e5e5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.accordion-heading{border-bottom:0}.accordion-heading .accordion-toggle{display:block;padding:8px 15px}.accordion-toggle{cursor:pointer}.accordion-inner{padding:9px 15px;border-top:1px solid #e5e5e5}.carousel{position:relative;margin-bottom:20px;line-height:1}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-moz-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>img,.carousel-inner>.item>a>img{display:block;line-height:1}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:40%;left:15px;width:40px;height:40px;margin-top:-20px;font-size:60px;font-weight:100;line-height:30px;color:#fff;text-align:center;background:#222;border:3px solid 
#fff;-webkit-border-radius:23px;-moz-border-radius:23px;border-radius:23px;opacity:.5;filter:alpha(opacity=50)}.carousel-control.right{right:15px;left:auto}.carousel-control:hover,.carousel-control:focus{color:#fff;text-decoration:none;opacity:.9;filter:alpha(opacity=90)}.carousel-indicators{position:absolute;top:15px;right:15px;z-index:5;margin:0;list-style:none}.carousel-indicators li{display:block;float:left;width:10px;height:10px;margin-left:5px;text-indent:-999px;background-color:#ccc;background-color:rgba(255,255,255,0.25);border-radius:5px}.carousel-indicators .active{background-color:#fff}.carousel-caption{position:absolute;right:0;bottom:0;left:0;padding:15px;background:#333;background:rgba(0,0,0,0.75)}.carousel-caption h4,.carousel-caption p{line-height:20px;color:#fff}.carousel-caption h4{margin:0 0 5px}.carousel-caption p{margin-bottom:0}.hero-unit{padding:60px;margin-bottom:30px;font-size:18px;font-weight:200;line-height:30px;color:inherit;background-color:#eee;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.hero-unit h1{margin-bottom:0;font-size:60px;line-height:1;letter-spacing:-1px;color:inherit}.hero-unit li{line-height:30px}.pull-right{float:right}.pull-left{float:left}.hide{display:none}.show{display:block}.invisible{visibility:hidden}.affix{position:fixed} diff --git a/rpki/gui/app/static/img/glyphicons-halflings-white.png b/rpki/gui/app/static/img/glyphicons-halflings-white.png Binary files differnew file mode 100644 index 00000000..3bf6484a --- /dev/null +++ b/rpki/gui/app/static/img/glyphicons-halflings-white.png diff --git a/rpki/gui/app/static/img/glyphicons-halflings.png b/rpki/gui/app/static/img/glyphicons-halflings.png Binary files differnew file mode 100644 index 00000000..a9969993 --- /dev/null +++ b/rpki/gui/app/static/img/glyphicons-halflings.png diff --git a/rpki/gui/app/static/img/sui-riu.ico b/rpki/gui/app/static/img/sui-riu.ico Binary files differnew file mode 100644 index 00000000..61223e27 --- /dev/null +++ b/rpki/gui/app/static/img/sui-riu.ico diff --git a/rpki/gui/app/static/js/bootstrap.min.js b/rpki/gui/app/static/js/bootstrap.min.js new file mode 100644 index 00000000..95c5ac5e --- /dev/null +++ b/rpki/gui/app/static/js/bootstrap.min.js @@ -0,0 +1,6 @@ +/*! +* Bootstrap.js by @fat & @mdo +* Copyright 2012 Twitter, Inc. 
+* http://www.apache.org/licenses/LICENSE-2.0.txt +*/ +!function(e){"use strict";e(function(){e.support.transition=function(){var e=function(){var e=document.createElement("bootstrap"),t={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"},n;for(n in t)if(e.style[n]!==undefined)return t[n]}();return e&&{end:e}}()})}(window.jQuery),!function(e){"use strict";var t='[data-dismiss="alert"]',n=function(n){e(n).on("click",t,this.close)};n.prototype.close=function(t){function s(){i.trigger("closed").remove()}var n=e(this),r=n.attr("data-target"),i;r||(r=n.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,"")),i=e(r),t&&t.preventDefault(),i.length||(i=n.hasClass("alert")?n:n.parent()),i.trigger(t=e.Event("close"));if(t.isDefaultPrevented())return;i.removeClass("in"),e.support.transition&&i.hasClass("fade")?i.on(e.support.transition.end,s):s()};var r=e.fn.alert;e.fn.alert=function(t){return this.each(function(){var r=e(this),i=r.data("alert");i||r.data("alert",i=new n(this)),typeof t=="string"&&i[t].call(r)})},e.fn.alert.Constructor=n,e.fn.alert.noConflict=function(){return e.fn.alert=r,this},e(document).on("click.alert.data-api",t,n.prototype.close)}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.button.defaults,n)};t.prototype.setState=function(e){var t="disabled",n=this.$element,r=n.data(),i=n.is("input")?"val":"html";e+="Text",r.resetText||n.data("resetText",n[i]()),n[i](r[e]||this.options[e]),setTimeout(function(){e=="loadingText"?n.addClass(t).attr(t,t):n.removeClass(t).removeAttr(t)},0)},t.prototype.toggle=function(){var e=this.$element.closest('[data-toggle="buttons-radio"]');e&&e.find(".active").removeClass("active"),this.$element.toggleClass("active")};var n=e.fn.button;e.fn.button=function(n){return this.each(function(){var r=e(this),i=r.data("button"),s=typeof n=="object"&&n;i||r.data("button",i=new t(this,s)),n=="toggle"?i.toggle():n&&i.setState(n)})},e.fn.button.defaults={loadingText:"loading..."},e.fn.button.Constructor=t,e.fn.button.noConflict=function(){return e.fn.button=n,this},e(document).on("click.button.data-api","[data-toggle^=button]",function(t){var n=e(t.target);n.hasClass("btn")||(n=n.closest(".btn")),n.button("toggle")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.$indicators=this.$element.find(".carousel-indicators"),this.options=n,this.options.pause=="hover"&&this.$element.on("mouseenter",e.proxy(this.pause,this)).on("mouseleave",e.proxy(this.cycle,this))};t.prototype={cycle:function(t){return t||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(e.proxy(this.next,this),this.options.interval)),this},getActiveIndex:function(){return this.$active=this.$element.find(".item.active"),this.$items=this.$active.parent().children(),this.$items.index(this.$active)},to:function(t){var n=this.getActiveIndex(),r=this;if(t>this.$items.length-1||t<0)return;return this.sliding?this.$element.one("slid",function(){r.to(t)}):n==t?this.pause().cycle():this.slide(t>n?"next":"prev",e(this.$items[t]))},pause:function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&e.support.transition.end&&(this.$element.trigger(e.support.transition.end),this.cycle(!0)),clearInterval(this.interval),this.interval=null,this},next:function(){if(this.sliding)return;return 
this.slide("next")},prev:function(){if(this.sliding)return;return this.slide("prev")},slide:function(t,n){var r=this.$element.find(".item.active"),i=n||r[t](),s=this.interval,o=t=="next"?"left":"right",u=t=="next"?"first":"last",a=this,f;this.sliding=!0,s&&this.pause(),i=i.length?i:this.$element.find(".item")[u](),f=e.Event("slide",{relatedTarget:i[0],direction:o});if(i.hasClass("active"))return;this.$indicators.length&&(this.$indicators.find(".active").removeClass("active"),this.$element.one("slid",function(){var t=e(a.$indicators.children()[a.getActiveIndex()]);t&&t.addClass("active")}));if(e.support.transition&&this.$element.hasClass("slide")){this.$element.trigger(f);if(f.isDefaultPrevented())return;i.addClass(t),i[0].offsetWidth,r.addClass(o),i.addClass(o),this.$element.one(e.support.transition.end,function(){i.removeClass([t,o].join(" ")).addClass("active"),r.removeClass(["active",o].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger("slid")},0)})}else{this.$element.trigger(f);if(f.isDefaultPrevented())return;r.removeClass("active"),i.addClass("active"),this.sliding=!1,this.$element.trigger("slid")}return s&&this.cycle(),this}};var n=e.fn.carousel;e.fn.carousel=function(n){return this.each(function(){var r=e(this),i=r.data("carousel"),s=e.extend({},e.fn.carousel.defaults,typeof n=="object"&&n),o=typeof n=="string"?n:s.slide;i||r.data("carousel",i=new t(this,s)),typeof n=="number"?i.to(n):o?i[o]():s.interval&&i.pause().cycle()})},e.fn.carousel.defaults={interval:5e3,pause:"hover"},e.fn.carousel.Constructor=t,e.fn.carousel.noConflict=function(){return e.fn.carousel=n,this},e(document).on("click.carousel.data-api","[data-slide], [data-slide-to]",function(t){var n=e(this),r,i=e(n.attr("data-target")||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,"")),s=e.extend({},i.data(),n.data()),o;i.carousel(s),(o=n.attr("data-slide-to"))&&i.data("carousel").pause().to(o).cycle(),t.preventDefault()})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.collapse.defaults,n),this.options.parent&&(this.$parent=e(this.options.parent)),this.options.toggle&&this.toggle()};t.prototype={constructor:t,dimension:function(){var e=this.$element.hasClass("width");return e?"width":"height"},show:function(){var t,n,r,i;if(this.transitioning||this.$element.hasClass("in"))return;t=this.dimension(),n=e.camelCase(["scroll",t].join("-")),r=this.$parent&&this.$parent.find("> .accordion-group > .in");if(r&&r.length){i=r.data("collapse");if(i&&i.transitioning)return;r.collapse("hide"),i||r.data("collapse",null)}this.$element[t](0),this.transition("addClass",e.Event("show"),"shown"),e.support.transition&&this.$element[t](this.$element[0][n])},hide:function(){var t;if(this.transitioning||!this.$element.hasClass("in"))return;t=this.dimension(),this.reset(this.$element[t]()),this.transition("removeClass",e.Event("hide"),"hidden"),this.$element[t](0)},reset:function(e){var t=this.dimension();return this.$element.removeClass("collapse")[t](e||"auto")[0].offsetWidth,this.$element[e!==null?"addClass":"removeClass"]("collapse"),this},transition:function(t,n,r){var i=this,s=function(){n.type=="show"&&i.reset(),i.transitioning=0,i.$element.trigger(r)};this.$element.trigger(n);if(n.isDefaultPrevented())return;this.transitioning=1,this.$element[t]("in"),e.support.transition&&this.$element.hasClass("collapse")?this.$element.one(e.support.transition.end,s):s()},toggle:function(){this[this.$element.hasClass("in")?"hide":"show"]()}};var 
n=e.fn.collapse;e.fn.collapse=function(n){return this.each(function(){var r=e(this),i=r.data("collapse"),s=e.extend({},e.fn.collapse.defaults,r.data(),typeof n=="object"&&n);i||r.data("collapse",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.collapse.defaults={toggle:!0},e.fn.collapse.Constructor=t,e.fn.collapse.noConflict=function(){return e.fn.collapse=n,this},e(document).on("click.collapse.data-api","[data-toggle=collapse]",function(t){var n=e(this),r,i=n.attr("data-target")||t.preventDefault()||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,""),s=e(i).data("collapse")?"toggle":n.data();n[e(i).hasClass("in")?"addClass":"removeClass"]("collapsed"),e(i).collapse(s)})}(window.jQuery),!function(e){"use strict";function r(){e(t).each(function(){i(e(this)).removeClass("open")})}function i(t){var n=t.attr("data-target"),r;n||(n=t.attr("href"),n=n&&/#/.test(n)&&n.replace(/.*(?=#[^\s]*$)/,"")),r=n&&e(n);if(!r||!r.length)r=t.parent();return r}var t="[data-toggle=dropdown]",n=function(t){var n=e(t).on("click.dropdown.data-api",this.toggle);e("html").on("click.dropdown.data-api",function(){n.parent().removeClass("open")})};n.prototype={constructor:n,toggle:function(t){var n=e(this),s,o;if(n.is(".disabled, :disabled"))return;return s=i(n),o=s.hasClass("open"),r(),o||s.toggleClass("open"),n.focus(),!1},keydown:function(n){var r,s,o,u,a,f;if(!/(38|40|27)/.test(n.keyCode))return;r=e(this),n.preventDefault(),n.stopPropagation();if(r.is(".disabled, :disabled"))return;u=i(r),a=u.hasClass("open");if(!a||a&&n.keyCode==27)return n.which==27&&u.find(t).focus(),r.click();s=e("[role=menu] li:not(.divider):visible a",u);if(!s.length)return;f=s.index(s.filter(":focus")),n.keyCode==38&&f>0&&f--,n.keyCode==40&&f<s.length-1&&f++,~f||(f=0),s.eq(f).focus()}};var s=e.fn.dropdown;e.fn.dropdown=function(t){return this.each(function(){var r=e(this),i=r.data("dropdown");i||r.data("dropdown",i=new n(this)),typeof t=="string"&&i[t].call(r)})},e.fn.dropdown.Constructor=n,e.fn.dropdown.noConflict=function(){return e.fn.dropdown=s,this},e(document).on("click.dropdown.data-api",r).on("click.dropdown.data-api",".dropdown form",function(e){e.stopPropagation()}).on("click.dropdown-menu",function(e){e.stopPropagation()}).on("click.dropdown.data-api",t,n.prototype.toggle).on("keydown.dropdown.data-api",t+", [role=menu]",n.prototype.keydown)}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.options=n,this.$element=e(t).delegate('[data-dismiss="modal"]',"click.dismiss.modal",e.proxy(this.hide,this)),this.options.remote&&this.$element.find(".modal-body").load(this.options.remote)};t.prototype={constructor:t,toggle:function(){return this[this.isShown?"hide":"show"]()},show:function(){var t=this,n=e.Event("show");this.$element.trigger(n);if(this.isShown||n.isDefaultPrevented())return;this.isShown=!0,this.escape(),this.backdrop(function(){var n=e.support.transition&&t.$element.hasClass("fade");t.$element.parent().length||t.$element.appendTo(document.body),t.$element.show(),n&&t.$element[0].offsetWidth,t.$element.addClass("in").attr("aria-hidden",!1),t.enforceFocus(),n?t.$element.one(e.support.transition.end,function(){t.$element.focus().trigger("shown")}):t.$element.focus().trigger("shown")})},hide:function(t){t&&t.preventDefault();var 
n=this;t=e.Event("hide"),this.$element.trigger(t);if(!this.isShown||t.isDefaultPrevented())return;this.isShown=!1,this.escape(),e(document).off("focusin.modal"),this.$element.removeClass("in").attr("aria-hidden",!0),e.support.transition&&this.$element.hasClass("fade")?this.hideWithTransition():this.hideModal()},enforceFocus:function(){var t=this;e(document).on("focusin.modal",function(e){t.$element[0]!==e.target&&!t.$element.has(e.target).length&&t.$element.focus()})},escape:function(){var e=this;this.isShown&&this.options.keyboard?this.$element.on("keyup.dismiss.modal",function(t){t.which==27&&e.hide()}):this.isShown||this.$element.off("keyup.dismiss.modal")},hideWithTransition:function(){var t=this,n=setTimeout(function(){t.$element.off(e.support.transition.end),t.hideModal()},500);this.$element.one(e.support.transition.end,function(){clearTimeout(n),t.hideModal()})},hideModal:function(){var e=this;this.$element.hide(),this.backdrop(function(){e.removeBackdrop(),e.$element.trigger("hidden")})},removeBackdrop:function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},backdrop:function(t){var n=this,r=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var i=e.support.transition&&r;this.$backdrop=e('<div class="modal-backdrop '+r+'" />').appendTo(document.body),this.$backdrop.click(this.options.backdrop=="static"?e.proxy(this.$element[0].focus,this.$element[0]):e.proxy(this.hide,this)),i&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in");if(!t)return;i?this.$backdrop.one(e.support.transition.end,t):t()}else!this.isShown&&this.$backdrop?(this.$backdrop.removeClass("in"),e.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one(e.support.transition.end,t):t()):t&&t()}};var n=e.fn.modal;e.fn.modal=function(n){return this.each(function(){var r=e(this),i=r.data("modal"),s=e.extend({},e.fn.modal.defaults,r.data(),typeof n=="object"&&n);i||r.data("modal",i=new t(this,s)),typeof n=="string"?i[n]():s.show&&i.show()})},e.fn.modal.defaults={backdrop:!0,keyboard:!0,show:!0},e.fn.modal.Constructor=t,e.fn.modal.noConflict=function(){return e.fn.modal=n,this},e(document).on("click.modal.data-api",'[data-toggle="modal"]',function(t){var n=e(this),r=n.attr("href"),i=e(n.attr("data-target")||r&&r.replace(/.*(?=#[^\s]+$)/,"")),s=i.data("modal")?"toggle":e.extend({remote:!/#/.test(r)&&r},i.data(),n.data());t.preventDefault(),i.modal(s).one("hide",function(){n.focus()})})}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("tooltip",e,t)};t.prototype={constructor:t,init:function(t,n,r){var i,s,o,u,a;this.type=t,this.$element=e(n),this.options=this.getOptions(r),this.enabled=!0,o=this.options.trigger.split(" ");for(a=o.length;a--;)u=o[a],u=="click"?this.$element.on("click."+this.type,this.options.selector,e.proxy(this.toggle,this)):u!="manual"&&(i=u=="hover"?"mouseenter":"focus",s=u=="hover"?"mouseleave":"blur",this.$element.on(i+"."+this.type,this.options.selector,e.proxy(this.enter,this)),this.$element.on(s+"."+this.type,this.options.selector,e.proxy(this.leave,this)));this.options.selector?this._options=e.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},getOptions:function(t){return t=e.extend({},e.fn[this.type].defaults,this.$element.data(),t),t.delay&&typeof t.delay=="number"&&(t.delay={show:t.delay,hide:t.delay}),t},enter:function(t){var 
n=e.fn[this.type].defaults,r={},i;this._options&&e.each(this._options,function(e,t){n[e]!=t&&(r[e]=t)},this),i=e(t.currentTarget)[this.type](r).data(this.type);if(!i.options.delay||!i.options.delay.show)return i.show();clearTimeout(this.timeout),i.hoverState="in",this.timeout=setTimeout(function(){i.hoverState=="in"&&i.show()},i.options.delay.show)},leave:function(t){var n=e(t.currentTarget)[this.type](this._options).data(this.type);this.timeout&&clearTimeout(this.timeout);if(!n.options.delay||!n.options.delay.hide)return n.hide();n.hoverState="out",this.timeout=setTimeout(function(){n.hoverState=="out"&&n.hide()},n.options.delay.hide)},show:function(){var t,n,r,i,s,o,u=e.Event("show");if(this.hasContent()&&this.enabled){this.$element.trigger(u);if(u.isDefaultPrevented())return;t=this.tip(),this.setContent(),this.options.animation&&t.addClass("fade"),s=typeof this.options.placement=="function"?this.options.placement.call(this,t[0],this.$element[0]):this.options.placement,t.detach().css({top:0,left:0,display:"block"}),this.options.container?t.appendTo(this.options.container):t.insertAfter(this.$element),n=this.getPosition(),r=t[0].offsetWidth,i=t[0].offsetHeight;switch(s){case"bottom":o={top:n.top+n.height,left:n.left+n.width/2-r/2};break;case"top":o={top:n.top-i,left:n.left+n.width/2-r/2};break;case"left":o={top:n.top+n.height/2-i/2,left:n.left-r};break;case"right":o={top:n.top+n.height/2-i/2,left:n.left+n.width}}this.applyPlacement(o,s),this.$element.trigger("shown")}},applyPlacement:function(e,t){var n=this.tip(),r=n[0].offsetWidth,i=n[0].offsetHeight,s,o,u,a;n.offset(e).addClass(t).addClass("in"),s=n[0].offsetWidth,o=n[0].offsetHeight,t=="top"&&o!=i&&(e.top=e.top+i-o,a=!0),t=="bottom"||t=="top"?(u=0,e.left<0&&(u=e.left*-2,e.left=0,n.offset(e),s=n[0].offsetWidth,o=n[0].offsetHeight),this.replaceArrow(u-r+s,s,"left")):this.replaceArrow(o-i,o,"top"),a&&n.offset(e)},replaceArrow:function(e,t,n){this.arrow().css(n,e?50*(1-e/t)+"%":"")},setContent:function(){var e=this.tip(),t=this.getTitle();e.find(".tooltip-inner")[this.options.html?"html":"text"](t),e.removeClass("fade in top bottom left right")},hide:function(){function i(){var t=setTimeout(function(){n.off(e.support.transition.end).detach()},500);n.one(e.support.transition.end,function(){clearTimeout(t),n.detach()})}var t=this,n=this.tip(),r=e.Event("hide");this.$element.trigger(r);if(r.isDefaultPrevented())return;return n.removeClass("in"),e.support.transition&&this.$tip.hasClass("fade")?i():n.detach(),this.$element.trigger("hidden"),this},fixTitle:function(){var e=this.$element;(e.attr("title")||typeof e.attr("data-original-title")!="string")&&e.attr("data-original-title",e.attr("title")||"").attr("title","")},hasContent:function(){return this.getTitle()},getPosition:function(){var t=this.$element[0];return e.extend({},typeof t.getBoundingClientRect=="function"?t.getBoundingClientRect():{width:t.offsetWidth,height:t.offsetHeight},this.$element.offset())},getTitle:function(){var e,t=this.$element,n=this.options;return e=t.attr("data-original-title")||(typeof n.title=="function"?n.title.call(t[0]):n.title),e},tip:function(){return this.$tip=this.$tip||e(this.options.template)},arrow:function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},validate:function(){this.$element[0].parentNode||(this.hide(),this.$element=null,this.options=null)},enable:function(){this.enabled=!0},disable:function(){this.enabled=!1},toggleEnabled:function(){this.enabled=!this.enabled},toggle:function(t){var 
n=t?e(t.currentTarget)[this.type](this._options).data(this.type):this;n.tip().hasClass("in")?n.hide():n.show()},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}};var n=e.fn.tooltip;e.fn.tooltip=function(n){return this.each(function(){var r=e(this),i=r.data("tooltip"),s=typeof n=="object"&&n;i||r.data("tooltip",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.tooltip.Constructor=t,e.fn.tooltip.defaults={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1},e.fn.tooltip.noConflict=function(){return e.fn.tooltip=n,this}}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("popover",e,t)};t.prototype=e.extend({},e.fn.tooltip.Constructor.prototype,{constructor:t,setContent:function(){var e=this.tip(),t=this.getTitle(),n=this.getContent();e.find(".popover-title")[this.options.html?"html":"text"](t),e.find(".popover-content")[this.options.html?"html":"text"](n),e.removeClass("fade top bottom left right in")},hasContent:function(){return this.getTitle()||this.getContent()},getContent:function(){var e,t=this.$element,n=this.options;return e=(typeof n.content=="function"?n.content.call(t[0]):n.content)||t.attr("data-content"),e},tip:function(){return this.$tip||(this.$tip=e(this.options.template)),this.$tip},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}});var n=e.fn.popover;e.fn.popover=function(n){return this.each(function(){var r=e(this),i=r.data("popover"),s=typeof n=="object"&&n;i||r.data("popover",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.popover.Constructor=t,e.fn.popover.defaults=e.extend({},e.fn.tooltip.defaults,{placement:"right",trigger:"click",content:"",template:'<div class="popover"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),e.fn.popover.noConflict=function(){return e.fn.popover=n,this}}(window.jQuery),!function(e){"use strict";function t(t,n){var r=e.proxy(this.process,this),i=e(t).is("body")?e(window):e(t),s;this.options=e.extend({},e.fn.scrollspy.defaults,n),this.$scrollElement=i.on("scroll.scroll-spy.data-api",r),this.selector=(this.options.target||(s=e(t).attr("href"))&&s.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > a",this.$body=e("body"),this.refresh(),this.process()}t.prototype={constructor:t,refresh:function(){var t=this,n;this.offsets=e([]),this.targets=e([]),n=this.$body.find(this.selector).map(function(){var n=e(this),r=n.data("target")||n.attr("href"),i=/^#\w/.test(r)&&e(r);return i&&i.length&&[[i.position().top+(!e.isWindow(t.$scrollElement.get(0))&&t.$scrollElement.scrollTop()),r]]||null}).sort(function(e,t){return e[0]-t[0]}).each(function(){t.offsets.push(this[0]),t.targets.push(this[1])})},process:function(){var e=this.$scrollElement.scrollTop()+this.options.offset,t=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,n=t-this.$scrollElement.height(),r=this.offsets,i=this.targets,s=this.activeTarget,o;if(e>=n)return s!=(o=i.last()[0])&&this.activate(o);for(o=r.length;o--;)s!=i[o]&&e>=r[o]&&(!r[o+1]||e<=r[o+1])&&this.activate(i[o])},activate:function(t){var 
n,r;this.activeTarget=t,e(this.selector).parent(".active").removeClass("active"),r=this.selector+'[data-target="'+t+'"],'+this.selector+'[href="'+t+'"]',n=e(r).parent("li").addClass("active"),n.parent(".dropdown-menu").length&&(n=n.closest("li.dropdown").addClass("active")),n.trigger("activate")}};var n=e.fn.scrollspy;e.fn.scrollspy=function(n){return this.each(function(){var r=e(this),i=r.data("scrollspy"),s=typeof n=="object"&&n;i||r.data("scrollspy",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.scrollspy.Constructor=t,e.fn.scrollspy.defaults={offset:10},e.fn.scrollspy.noConflict=function(){return e.fn.scrollspy=n,this},e(window).on("load",function(){e('[data-spy="scroll"]').each(function(){var t=e(this);t.scrollspy(t.data())})})}(window.jQuery),!function(e){"use strict";var t=function(t){this.element=e(t)};t.prototype={constructor:t,show:function(){var t=this.element,n=t.closest("ul:not(.dropdown-menu)"),r=t.attr("data-target"),i,s,o;r||(r=t.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,""));if(t.parent("li").hasClass("active"))return;i=n.find(".active:last a")[0],o=e.Event("show",{relatedTarget:i}),t.trigger(o);if(o.isDefaultPrevented())return;s=e(r),this.activate(t.parent("li"),n),this.activate(s,s.parent(),function(){t.trigger({type:"shown",relatedTarget:i})})},activate:function(t,n,r){function o(){i.removeClass("active").find("> .dropdown-menu > .active").removeClass("active"),t.addClass("active"),s?(t[0].offsetWidth,t.addClass("in")):t.removeClass("fade"),t.parent(".dropdown-menu")&&t.closest("li.dropdown").addClass("active"),r&&r()}var i=n.find("> .active"),s=r&&e.support.transition&&i.hasClass("fade");s?i.one(e.support.transition.end,o):o(),i.removeClass("in")}};var n=e.fn.tab;e.fn.tab=function(n){return this.each(function(){var r=e(this),i=r.data("tab");i||r.data("tab",i=new t(this)),typeof n=="string"&&i[n]()})},e.fn.tab.Constructor=t,e.fn.tab.noConflict=function(){return e.fn.tab=n,this},e(document).on("click.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(t){t.preventDefault(),e(this).tab("show")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.typeahead.defaults,n),this.matcher=this.options.matcher||this.matcher,this.sorter=this.options.sorter||this.sorter,this.highlighter=this.options.highlighter||this.highlighter,this.updater=this.options.updater||this.updater,this.source=this.options.source,this.$menu=e(this.options.menu),this.shown=!1,this.listen()};t.prototype={constructor:t,select:function(){var e=this.$menu.find(".active").attr("data-value");return this.$element.val(this.updater(e)).change(),this.hide()},updater:function(e){return e},show:function(){var t=e.extend({},this.$element.position(),{height:this.$element[0].offsetHeight});return this.$menu.insertAfter(this.$element).css({top:t.top+t.height,left:t.left}).show(),this.shown=!0,this},hide:function(){return this.$menu.hide(),this.shown=!1,this},lookup:function(t){var n;return this.query=this.$element.val(),!this.query||this.query.length<this.options.minLength?this.shown?this.hide():this:(n=e.isFunction(this.source)?this.source(this.query,e.proxy(this.process,this)):this.source,n?this.process(n):this)},process:function(t){var n=this;return t=e.grep(t,function(e){return n.matcher(e)}),t=this.sorter(t),t.length?this.render(t.slice(0,this.options.items)).show():this.shown?this.hide():this},matcher:function(e){return~e.toLowerCase().indexOf(this.query.toLowerCase())},sorter:function(e){var 
t=[],n=[],r=[],i;while(i=e.shift())i.toLowerCase().indexOf(this.query.toLowerCase())?~i.indexOf(this.query)?n.push(i):r.push(i):t.push(i);return t.concat(n,r)},highlighter:function(e){var t=this.query.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g,"\\$&");return e.replace(new RegExp("("+t+")","ig"),function(e,t){return"<strong>"+t+"</strong>"})},render:function(t){var n=this;return t=e(t).map(function(t,r){return t=e(n.options.item).attr("data-value",r),t.find("a").html(n.highlighter(r)),t[0]}),t.first().addClass("active"),this.$menu.html(t),this},next:function(t){var n=this.$menu.find(".active").removeClass("active"),r=n.next();r.length||(r=e(this.$menu.find("li")[0])),r.addClass("active")},prev:function(e){var t=this.$menu.find(".active").removeClass("active"),n=t.prev();n.length||(n=this.$menu.find("li").last()),n.addClass("active")},listen:function(){this.$element.on("focus",e.proxy(this.focus,this)).on("blur",e.proxy(this.blur,this)).on("keypress",e.proxy(this.keypress,this)).on("keyup",e.proxy(this.keyup,this)),this.eventSupported("keydown")&&this.$element.on("keydown",e.proxy(this.keydown,this)),this.$menu.on("click",e.proxy(this.click,this)).on("mouseenter","li",e.proxy(this.mouseenter,this)).on("mouseleave","li",e.proxy(this.mouseleave,this))},eventSupported:function(e){var t=e in this.$element;return t||(this.$element.setAttribute(e,"return;"),t=typeof this.$element[e]=="function"),t},move:function(e){if(!this.shown)return;switch(e.keyCode){case 9:case 13:case 27:e.preventDefault();break;case 38:e.preventDefault(),this.prev();break;case 40:e.preventDefault(),this.next()}e.stopPropagation()},keydown:function(t){this.suppressKeyPressRepeat=~e.inArray(t.keyCode,[40,38,9,13,27]),this.move(t)},keypress:function(e){if(this.suppressKeyPressRepeat)return;this.move(e)},keyup:function(e){switch(e.keyCode){case 40:case 38:case 16:case 17:case 18:break;case 9:case 13:if(!this.shown)return;this.select();break;case 27:if(!this.shown)return;this.hide();break;default:this.lookup()}e.stopPropagation(),e.preventDefault()},focus:function(e){this.focused=!0},blur:function(e){this.focused=!1,!this.mousedover&&this.shown&&this.hide()},click:function(e){e.stopPropagation(),e.preventDefault(),this.select(),this.$element.focus()},mouseenter:function(t){this.mousedover=!0,this.$menu.find(".active").removeClass("active"),e(t.currentTarget).addClass("active")},mouseleave:function(e){this.mousedover=!1,!this.focused&&this.shown&&this.hide()}};var n=e.fn.typeahead;e.fn.typeahead=function(n){return this.each(function(){var r=e(this),i=r.data("typeahead"),s=typeof n=="object"&&n;i||r.data("typeahead",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.typeahead.defaults={source:[],items:8,menu:'<ul class="typeahead dropdown-menu"></ul>',item:'<li><a href="#"></a></li>',minLength:1},e.fn.typeahead.Constructor=t,e.fn.typeahead.noConflict=function(){return e.fn.typeahead=n,this},e(document).on("focus.typeahead.data-api",'[data-provide="typeahead"]',function(t){var n=e(this);if(n.data("typeahead"))return;n.typeahead(n.data())})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.options=e.extend({},e.fn.affix.defaults,n),this.$window=e(window).on("scroll.affix.data-api",e.proxy(this.checkPosition,this)).on("click.affix.data-api",e.proxy(function(){setTimeout(e.proxy(this.checkPosition,this),1)},this)),this.$element=e(t),this.checkPosition()};t.prototype.checkPosition=function(){if(!this.$element.is(":visible"))return;var 
t=e(document).height(),n=this.$window.scrollTop(),r=this.$element.offset(),i=this.options.offset,s=i.bottom,o=i.top,u="affix affix-top affix-bottom",a;typeof i!="object"&&(s=o=i),typeof o=="function"&&(o=i.top()),typeof s=="function"&&(s=i.bottom()),a=this.unpin!=null&&n+this.unpin<=r.top?!1:s!=null&&r.top+this.$element.height()>=t-s?"bottom":o!=null&&n<=o?"top":!1;if(this.affixed===a)return;this.affixed=a,this.unpin=a=="bottom"?r.top-n:null,this.$element.removeClass(u).addClass("affix"+(a?"-"+a:""))};var n=e.fn.affix;e.fn.affix=function(n){return this.each(function(){var r=e(this),i=r.data("affix"),s=typeof n=="object"&&n;i||r.data("affix",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.affix.Constructor=t,e.fn.affix.defaults={offset:0},e.fn.affix.noConflict=function(){return e.fn.affix=n,this},e(window).on("load",function(){e('[data-spy="affix"]').each(function(){var t=e(this),n=t.data();n.offset=n.offset||{},n.offsetBottom&&(n.offset.bottom=n.offsetBottom),n.offsetTop&&(n.offset.top=n.offsetTop),t.affix(n)})})}(window.jQuery);
\ No newline at end of file diff --git a/rpki/gui/app/static/js/jquery-1.8.3.min.js b/rpki/gui/app/static/js/jquery-1.8.3.min.js new file mode 100644 index 00000000..83589daa --- /dev/null +++ b/rpki/gui/app/static/js/jquery-1.8.3.min.js @@ -0,0 +1,2 @@ +/*! jQuery v1.8.3 jquery.com | jquery.org/license */
+(function(e,t){function _(e){var t=M[e]={};return v.each(e.split(y),function(e,n){t[n]=!0}),t}function H(e,n,r){if(r===t&&e.nodeType===1){var i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else r=t}return r}function B(e){var t;for(t in e){if(t==="data"&&v.isEmptyObject(e[t]))continue;if(t!=="toJSON")return!1}return!0}function et(){return!1}function tt(){return!0}function ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do e=e[t];while(e&&e.nodeType!==1);return e}function ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(e,t)>=0===n})}function lt(e){var t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}function Lt(e,t){return e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete o.handle,o.events={};for(n in u)for(r=0,i=u[n].length;r<i;r++)v.event.add(t,n,u[n][r])}o.data&&(o.data=v.extend({},o.data))}function Ot(e,t){var n;if(t.nodeType!==1)return;t.clearAttributes&&t.clearAttributes(),t.mergeAttributes&&t.mergeAttributes(e),n=t.nodeName.toLowerCase(),n==="object"?(t.parentNode&&(t.outerHTML=e.outerHTML),v.support.html5Clone&&e.innerHTML&&!v.trim(t.innerHTML)&&(t.innerHTML=e.innerHTML)):n==="input"&&Et.test(e.type)?(t.defaultChecked=t.checked=e.checked,t.value!==e.value&&(t.value=e.value)):n==="option"?t.selected=e.defaultSelected:n==="input"||n==="textarea"?t.defaultValue=e.defaultValue:n==="script"&&t.text!==e.text&&(t.text=e.text),t.removeAttribute(v.expando)}function Mt(e){return typeof e.getElementsByTagName!="undefined"?e.getElementsByTagName("*"):typeof e.querySelectorAll!="undefined"?e.querySelectorAll("*"):[]}function _t(e){Et.test(e.type)&&(e.defaultChecked=e.checked)}function Qt(e,t){if(t in e)return t;var n=t.charAt(0).toUpperCase()+t.slice(1),r=t,i=Jt.length;while(i--){t=Jt[i]+n;if(t in e)return t}return r}function Gt(e,t){return e=t||e,v.css(e,"display")==="none"||!v.contains(e.ownerDocument,e)}function Yt(e,t){var n,r,i=[],s=0,o=e.length;for(;s<o;s++){n=e[s];if(!n.style)continue;i[s]=v._data(n,"olddisplay"),t?(!i[s]&&n.style.display==="none"&&(n.style.display=""),n.style.display===""&&Gt(n)&&(i[s]=v._data(n,"olddisplay",nn(n.nodeName)))):(r=Dt(n,"display"),!i[s]&&r!=="none"&&v._data(n,"olddisplay",r))}for(s=0;s<o;s++){n=e[s];if(!n.style)continue;if(!t||n.style.display==="none"||n.style.display==="")n.style.display=t?i[s]||"":"none"}return e}function Zt(e,t,n){var r=Rt.exec(t);return r?Math.max(0,r[1]-(n||0))+(r[2]||"px"):t}function en(e,t,n,r){var i=n===(r?"border":"content")?4:t==="width"?1:0,s=0;for(;i<4;i+=2)n==="margin"&&(s+=v.css(e,n+$t[i],!0)),r?(n==="content"&&(s-=parseFloat(Dt(e,"padding"+$t[i]))||0),n!=="margin"&&(s-=parseFloat(Dt(e,"border"+$t[i]+"Width"))||0)):(s+=parseFloat(Dt(e,"padding"+$t[i]))||0,n!=="padding"&&(s+=parseFloat(Dt(e,"border"+$t[i]+"Width"))||0));return s}function tn(e,t,n){var 
r=t==="width"?e.offsetWidth:e.offsetHeight,i=!0,s=v.support.boxSizing&&v.css(e,"boxSizing")==="border-box";if(r<=0||r==null){r=Dt(e,t);if(r<0||r==null)r=e.style[t];if(Ut.test(r))return r;i=s&&(v.support.boxSizingReliable||r===e.style[t]),r=parseFloat(r)||0}return r+en(e,t,n||(s?"border":"content"),i)+"px"}function nn(e){if(Wt[e])return Wt[e];var t=v("<"+e+">").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write("<!doctype html><html><body>"),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return Wt[e]=n,n}function fn(e,t,n,r){var i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof i=="object"?t:"")+"]",i,n,r)});else if(!n&&v.type(t)==="object")for(i in t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return function(t,n){typeof t!="string"&&(n=t,t="*");var r,i,s,o=t.toLowerCase().split(y),u=0,a=o.length;if(v.isFunction(n))for(;u<a;u++)r=o[u],s=/^\+/.test(r),s&&(r=r.substr(1)||"*"),i=e[r]=e[r]||[],i[s?"unshift":"push"](n)}}function kn(e,n,r,i,s,o){s=s||n.dataTypes[0],o=o||{},o[s]=!0;var u,a=e[s],f=0,l=a?a.length:0,c=e===Sn;for(;f<l&&(c||!u);f++)u=a[f](n,r,i),typeof u=="string"&&(!c||o[u]?u=t:(n.dataTypes.unshift(u),u=kn(e,n,r,i,u,o)));return(c||!u)&&!o["*"]&&(u=kn(e,n,r,i,"*",o)),u}function Ln(e,n){var r,i,s=v.ajaxSettings.flatOptions||{};for(r in n)n[r]!==t&&((s[r]?e:i||(i={}))[r]=n[r]);i&&v.extend(!0,e,i)}function An(e,n,r){var i,s,o,u,a=e.contents,f=e.dataTypes,l=e.responseFields;for(s in l)s in r&&(n[l[s]]=r[s]);while(f[0]==="*")f.shift(),i===t&&(i=e.mimeType||n.getResponseHeader("content-type"));if(i)for(s in a)if(a[s]&&a[s].test(i)){f.unshift(s);break}if(f[0]in r)o=f[0];else{for(s in r){if(!f[0]||e.converters[s+" "+f[0]]){o=s;break}u||(u=s)}o=o||u}if(o)return o!==f[0]&&f.unshift(o),r[o]}function On(e,t){var n,r,i,s,o=e.dataTypes.slice(),u=o[0],a={},f=0;e.dataFilter&&(t=e.dataFilter(t,e.dataType));if(o[1])for(n in e.converters)a[n.toLowerCase()]=e.converters[n];for(;i=o[++f];)if(i!=="*"){if(u!=="*"&&u!==i){n=a[u+" "+i]||a["* "+i];if(!n)for(r in a){s=r.split(" ");if(s[1]===i){n=a[u+" "+s[0]]||a["* "+s[0]];if(n){n===!0?n=a[r]:a[r]!==!0&&(i=s[0],o.splice(f--,0,i));break}}}if(n!==!0)if(n&&e["throws"])t=n(t);else try{t=n(t)}catch(l){return{state:"parsererror",error:n?l:"No conversion from "+u+" to "+i}}}u=i}return{state:"success",data:t}}function Fn(){try{return new e.XMLHttpRequest}catch(t){}}function In(){try{return new e.ActiveXObject("Microsoft.XMLHTTP")}catch(t){}}function $n(){return setTimeout(function(){qn=t},0),qn=v.now()}function Jn(e,t){v.each(t,function(t,n){var r=(Vn[t]||[]).concat(Vn["*"]),i=0,s=r.length;for(;i<s;i++)if(r[i].call(e,t,n))return})}function Kn(e,t,n){var r,i=0,s=0,o=Xn.length,u=v.Deferred().always(function(){delete a.elem}),a=function(){var t=qn||$n(),n=Math.max(0,f.startTime+f.duration-t),r=n/f.duration||0,i=1-r,s=0,o=f.tweens.length;for(;s<o;s++)f.tweens[s].run(i);return u.notifyWith(e,[f,i,n]),i<1&&o?n:(u.resolveWith(e,[f]),!1)},f=u.promise({elem:e,props:v.extend({},t),opts:v.extend(!0,{specialEasing:{}},n),originalProperties:t,originalOptions:n,startTime:qn||$n(),duration:n.duration,tweens:[],createTween:function(t,n,r){var i=v.Tween(e,f.opts,t,n,f.opts.specialEasing[t]||f.opts.easing);return f.tweens.push(i),i},stop:function(t){var 
n=0,r=t?f.tweens.length:0;for(;n<r;n++)f.tweens[n].run(1);return t?u.resolveWith(e,[f,t]):u.rejectWith(e,[f,t]),this}}),l=f.props;Qn(l,f.opts.specialEasing);for(;i<o;i++){r=Xn[i].call(f,e,l,f.opts);if(r)return r}return Jn(f,l),v.isFunction(f.opts.start)&&f.opts.start.call(e,f),v.fx.timer(v.extend(a,{anim:f,queue:f.opts.queue,elem:e})),f.progress(f.opts.progress).done(f.opts.done,f.opts.complete).fail(f.opts.fail).always(f.opts.always)}function Qn(e,t){var n,r,i,s,o;for(n in e){r=v.camelCase(n),i=t[r],s=e[n],v.isArray(s)&&(i=s[1],s=e[n]=s[0]),n!==r&&(e[r]=s,delete e[n]),o=v.cssHooks[r];if(o&&"expand"in o){s=o.expand(s),delete e[r];for(n in s)n in e||(e[n]=s[n],t[n]=i)}else t[r]=i}}function Gn(e,t,n){var r,i,s,o,u,a,f,l,c,h=this,p=e.style,d={},m=[],g=e.nodeType&&Gt(e);n.queue||(l=v._queueHooks(e,"fx"),l.unqueued==null&&(l.unqueued=0,c=l.empty.fire,l.empty.fire=function(){l.unqueued||c()}),l.unqueued++,h.always(function(){h.always(function(){l.unqueued--,v.queue(e,"fx").length||l.empty.fire()})})),e.nodeType===1&&("height"in t||"width"in t)&&(n.overflow=[p.overflow,p.overflowX,p.overflowY],v.css(e,"display")==="inline"&&v.css(e,"float")==="none"&&(!v.support.inlineBlockNeedsLayout||nn(e.nodeName)==="inline"?p.display="inline-block":p.zoom=1)),n.overflow&&(p.overflow="hidden",v.support.shrinkWrapBlocks||h.done(function(){p.overflow=n.overflow[0],p.overflowX=n.overflow[1],p.overflowY=n.overflow[2]}));for(r in t){s=t[r];if(Un.exec(s)){delete t[r],a=a||s==="toggle";if(s===(g?"hide":"show"))continue;m.push(r)}}o=m.length;if(o){u=v._data(e,"fxshow")||v._data(e,"fxshow",{}),"hidden"in u&&(g=u.hidden),a&&(u.hidden=!g),g?v(e).show():h.done(function(){v(e).hide()}),h.done(function(){var t;v.removeData(e,"fxshow",!0);for(t in d)v.style(e,t,d[t])});for(r=0;r<o;r++)i=m[r],f=h.createTween(i,g?u[i]:0),d[i]=u[i]||v.style(e,i),i in u||(u[i]=f.start,g&&(f.end=f.start,f.start=i==="width"||i==="height"?1:0))}}function Yn(e,t,n,r,i){return new Yn.prototype.init(e,t,n,r,i)}function Zn(e,t){var n,r={height:e},i=0;t=t?1:0;for(;i<4;i+=2-t)n=$t[i],r["margin"+n]=r["padding"+n]=e;return t&&(r.opacity=r.width=e),r}function tr(e){return v.isWindow(e)?e:e.nodeType===9?e.defaultView||e.parentWindow:!1}var n,r,i=e.document,s=e.location,o=e.navigator,u=e.jQuery,a=e.$,f=Array.prototype.push,l=Array.prototype.slice,c=Array.prototype.indexOf,h=Object.prototype.toString,p=Object.prototype.hasOwnProperty,d=String.prototype.trim,v=function(e,t){return new v.fn.init(e,t,n)},m=/[\-+]?(?:\d*\.|)\d+(?:[eE][\-+]?\d+|)/.source,g=/\S/,y=/\s+/,b=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,w=/^(?:[^#<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/,E=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,S=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,T=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,N=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,C=/^-ms-/,k=/-([\da-z])/gi,L=function(e,t){return(t+"").toUpperCase()},A=function(){i.addEventListener?(i.removeEventListener("DOMContentLoaded",A,!1),v.ready()):i.readyState==="complete"&&(i.detachEvent("onreadystatechange",A),v.ready())},O={};v.fn=v.prototype={constructor:v,init:function(e,n,r){var s,o,u,a;if(!e)return this;if(e.nodeType)return this.context=this[0]=e,this.length=1,this;if(typeof e=="string"){e.charAt(0)==="<"&&e.charAt(e.length-1)===">"&&e.length>=3?s=[null,e,null]:s=w.exec(e);if(s&&(s[1]||!n)){if(s[1])return n=n instanceof 
v?n[0]:n,a=n&&n.nodeType?n.ownerDocument||n:i,e=v.parseHTML(s[1],a,!0),E.test(s[1])&&v.isPlainObject(n)&&this.attr.call(e,n,!0),v.merge(this,e);o=i.getElementById(s[2]);if(o&&o.parentNode){if(o.id!==s[2])return r.find(e);this.length=1,this[0]=o}return this.context=i,this.selector=e,this}return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e)}return v.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),v.makeArray(e,this))},selector:"",jquery:"1.8.3",length:0,size:function(){return this.length},toArray:function(){return l.call(this)},get:function(e){return e==null?this.toArray():e<0?this[this.length+e]:this[e]},pushStack:function(e,t,n){var r=v.merge(this.constructor(),e);return r.prevObject=this,r.context=this.context,t==="find"?r.selector=this.selector+(this.selector?" ":"")+n:t&&(r.selector=this.selector+"."+t+"("+n+")"),r},each:function(e,t){return v.each(this,e,t)},ready:function(e){return v.ready.promise().done(e),this},eq:function(e){return e=+e,e===-1?this.slice(e):this.slice(e,e+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(l.apply(this,arguments),"slice",l.call(arguments).join(","))},map:function(e){return this.pushStack(v.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:[].sort,splice:[].splice},v.fn.init.prototype=v.fn,v.extend=v.fn.extend=function(){var e,n,r,i,s,o,u=arguments[0]||{},a=1,f=arguments.length,l=!1;typeof u=="boolean"&&(l=u,u=arguments[1]||{},a=2),typeof u!="object"&&!v.isFunction(u)&&(u={}),f===a&&(u=this,--a);for(;a<f;a++)if((e=arguments[a])!=null)for(n in e){r=u[n],i=e[n];if(u===i)continue;l&&i&&(v.isPlainObject(i)||(s=v.isArray(i)))?(s?(s=!1,o=r&&v.isArray(r)?r:[]):o=r&&v.isPlainObject(r)?r:{},u[n]=v.extend(l,o,i)):i!==t&&(u[n]=i)}return u},v.extend({noConflict:function(t){return e.$===v&&(e.$=a),t&&e.jQuery===v&&(e.jQuery=u),v},isReady:!1,readyWait:1,holdReady:function(e){e?v.readyWait++:v.ready(!0)},ready:function(e){if(e===!0?--v.readyWait:v.isReady)return;if(!i.body)return setTimeout(v.ready,1);v.isReady=!0;if(e!==!0&&--v.readyWait>0)return;r.resolveWith(i,[v]),v.fn.trigger&&v(i).trigger("ready").off("ready")},isFunction:function(e){return v.type(e)==="function"},isArray:Array.isArray||function(e){return v.type(e)==="array"},isWindow:function(e){return e!=null&&e==e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return e==null?String(e):O[h.call(e)]||"object"},isPlainObject:function(e){if(!e||v.type(e)!=="object"||e.nodeType||v.isWindow(e))return!1;try{if(e.constructor&&!p.call(e,"constructor")&&!p.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(n){return!1}var r;for(r in e);return r===t||p.call(e,r)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw new Error(e)},parseHTML:function(e,t,n){var r;return!e||typeof e!="string"?null:(typeof t=="boolean"&&(n=t,t=0),t=t||i,(r=E.exec(e))?[t.createElement(r[1])]:(r=v.buildFragment([e],t,n?null:[]),v.merge([],(r.cacheable?v.clone(r.fragment):r.fragment).childNodes)))},parseJSON:function(t){if(!t||typeof t!="string")return null;t=v.trim(t);if(e.JSON&&e.JSON.parse)return e.JSON.parse(t);if(S.test(t.replace(T,"@").replace(N,"]").replace(x,"")))return(new Function("return "+t))();v.error("Invalid JSON: "+t)},parseXML:function(n){var r,i;if(!n||typeof n!="string")return null;try{e.DOMParser?(i=new 
DOMParser,r=i.parseFromString(n,"text/xml")):(r=new ActiveXObject("Microsoft.XMLDOM"),r.async="false",r.loadXML(n))}catch(s){r=t}return(!r||!r.documentElement||r.getElementsByTagName("parsererror").length)&&v.error("Invalid XML: "+n),r},noop:function(){},globalEval:function(t){t&&g.test(t)&&(e.execScript||function(t){e.eval.call(e,t)})(t)},camelCase:function(e){return e.replace(C,"ms-").replace(k,L)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,n,r){var i,s=0,o=e.length,u=o===t||v.isFunction(e);if(r){if(u){for(i in e)if(n.apply(e[i],r)===!1)break}else for(;s<o;)if(n.apply(e[s++],r)===!1)break}else if(u){for(i in e)if(n.call(e[i],i,e[i])===!1)break}else for(;s<o;)if(n.call(e[s],s,e[s++])===!1)break;return e},trim:d&&!d.call("\ufeff\u00a0")?function(e){return e==null?"":d.call(e)}:function(e){return e==null?"":(e+"").replace(b,"")},makeArray:function(e,t){var n,r=t||[];return e!=null&&(n=v.type(e),e.length==null||n==="string"||n==="function"||n==="regexp"||v.isWindow(e)?f.call(r,e):v.merge(r,e)),r},inArray:function(e,t,n){var r;if(t){if(c)return c.call(t,e,n);r=t.length,n=n?n<0?Math.max(0,r+n):n:0;for(;n<r;n++)if(n in t&&t[n]===e)return n}return-1},merge:function(e,n){var r=n.length,i=e.length,s=0;if(typeof r=="number")for(;s<r;s++)e[i++]=n[s];else while(n[s]!==t)e[i++]=n[s++];return e.length=i,e},grep:function(e,t,n){var r,i=[],s=0,o=e.length;n=!!n;for(;s<o;s++)r=!!t(e[s],s),n!==r&&i.push(e[s]);return i},map:function(e,n,r){var i,s,o=[],u=0,a=e.length,f=e instanceof v||a!==t&&typeof a=="number"&&(a>0&&e[0]&&e[a-1]||a===0||v.isArray(e));if(f)for(;u<a;u++)i=n(e[u],u,r),i!=null&&(o[o.length]=i);else for(s in e)i=n(e[s],s,r),i!=null&&(o[o.length]=i);return o.concat.apply([],o)},guid:1,proxy:function(e,n){var r,i,s;return typeof n=="string"&&(r=e[n],n=e,e=r),v.isFunction(e)?(i=l.call(arguments,2),s=function(){return e.apply(n,i.concat(l.call(arguments)))},s.guid=e.guid=e.guid||v.guid++,s):t},access:function(e,n,r,i,s,o,u){var a,f=r==null,l=0,c=e.length;if(r&&typeof r=="object"){for(l in r)v.access(e,n,l,r[l],1,o,i);s=1}else if(i!==t){a=u===t&&v.isFunction(i),f&&(a?(a=n,n=function(e,t,n){return a.call(v(e),n)}):(n.call(e,i),n=null));if(n)for(;l<c;l++)n(e[l],r,a?i.call(e[l],l,n(e[l],r)):i,u);s=1}return s?e:f?n.call(e):c?n(e[0],r):o},now:function(){return(new Date).getTime()}}),v.ready.promise=function(t){if(!r){r=v.Deferred();if(i.readyState==="complete")setTimeout(v.ready,1);else if(i.addEventListener)i.addEventListener("DOMContentLoaded",A,!1),e.addEventListener("load",v.ready,!1);else{i.attachEvent("onreadystatechange",A),e.attachEvent("onload",v.ready);var n=!1;try{n=e.frameElement==null&&i.documentElement}catch(s){}n&&n.doScroll&&function o(){if(!v.isReady){try{n.doScroll("left")}catch(e){return setTimeout(o,50)}v.ready()}}()}}return r.promise(t)},v.each("Boolean Number String Function Array Date RegExp Object".split(" "),function(e,t){O["[object "+t+"]"]=t.toLowerCase()}),n=v(i);var M={};v.Callbacks=function(e){e=typeof e=="string"?M[e]||_(e):v.extend({},e);var n,r,i,s,o,u,a=[],f=!e.once&&[],l=function(t){n=e.memory&&t,r=!0,u=s||0,s=0,o=a.length,i=!0;for(;a&&u<o;u++)if(a[u].apply(t[0],t[1])===!1&&e.stopOnFalse){n=!1;break}i=!1,a&&(f?f.length&&l(f.shift()):n?a=[]:c.disable())},c={add:function(){if(a){var t=a.length;(function r(t){v.each(t,function(t,n){var i=v.type(n);i==="function"?(!e.unique||!c.has(n))&&a.push(n):n&&n.length&&i!=="string"&&r(n)})})(arguments),i?o=a.length:n&&(s=t,l(n))}return this},remove:function(){return 
a&&v.each(arguments,function(e,t){var n;while((n=v.inArray(t,a,n))>-1)a.splice(n,1),i&&(n<=o&&o--,n<=u&&u--)}),this},has:function(e){return v.inArray(e,a)>-1},empty:function(){return a=[],this},disable:function(){return a=f=n=t,this},disabled:function(){return!a},lock:function(){return f=t,n||c.disable(),this},locked:function(){return!f},fireWith:function(e,t){return t=t||[],t=[e,t.slice?t.slice():t],a&&(!r||f)&&(i?f.push(t):l(t)),this},fire:function(){return c.fireWith(this,arguments),this},fired:function(){return!!r}};return c},v.extend({Deferred:function(e){var t=[["resolve","done",v.Callbacks("once memory"),"resolved"],["reject","fail",v.Callbacks("once memory"),"rejected"],["notify","progress",v.Callbacks("memory")]],n="pending",r={state:function(){return n},always:function(){return i.done(arguments).fail(arguments),this},then:function(){var e=arguments;return v.Deferred(function(n){v.each(t,function(t,r){var s=r[0],o=e[t];i[r[1]](v.isFunction(o)?function(){var e=o.apply(this,arguments);e&&v.isFunction(e.promise)?e.promise().done(n.resolve).fail(n.reject).progress(n.notify):n[s+"With"](this===i?n:this,[e])}:n[s])}),e=null}).promise()},promise:function(e){return e!=null?v.extend(e,r):r}},i={};return r.pipe=r.then,v.each(t,function(e,s){var o=s[2],u=s[3];r[s[1]]=o.add,u&&o.add(function(){n=u},t[e^1][2].disable,t[2][2].lock),i[s[0]]=o.fire,i[s[0]+"With"]=o.fireWith}),r.promise(i),e&&e.call(i,i),i},when:function(e){var t=0,n=l.call(arguments),r=n.length,i=r!==1||e&&v.isFunction(e.promise)?r:0,s=i===1?e:v.Deferred(),o=function(e,t,n){return function(r){t[e]=this,n[e]=arguments.length>1?l.call(arguments):r,n===u?s.notifyWith(t,n):--i||s.resolveWith(t,n)}},u,a,f;if(r>1){u=new Array(r),a=new Array(r),f=new Array(r);for(;t<r;t++)n[t]&&v.isFunction(n[t].promise)?n[t].promise().done(o(t,f,n)).fail(s.reject).progress(o(t,a,u)):--i}return i||s.resolveWith(f,n),s.promise()}}),v.support=function(){var t,n,r,s,o,u,a,f,l,c,h,p=i.createElement("div");p.setAttribute("className","t"),p.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",n=p.getElementsByTagName("*"),r=p.getElementsByTagName("a")[0];if(!n||!r||!n.length)return{};s=i.createElement("select"),o=s.appendChild(i.createElement("option")),u=p.getElementsByTagName("input")[0],r.style.cssText="top:1px;float:left;opacity:.5",t={leadingWhitespace:p.firstChild.nodeType===3,tbody:!p.getElementsByTagName("tbody").length,htmlSerialize:!!p.getElementsByTagName("link").length,style:/top/.test(r.getAttribute("style")),hrefNormalized:r.getAttribute("href")==="/a",opacity:/^0.5/.test(r.style.opacity),cssFloat:!!r.style.cssFloat,checkOn:u.value==="on",optSelected:o.selected,getSetAttribute:p.className!=="t",enctype:!!i.createElement("form").enctype,html5Clone:i.createElement("nav").cloneNode(!0).outerHTML!=="<:nav></:nav>",boxModel:i.compatMode==="CSS1Compat",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},u.checked=!0,t.noCloneChecked=u.cloneNode(!0).checked,s.disabled=!0,t.optDisabled=!o.disabled;try{delete 
p.test}catch(d){t.deleteExpando=!1}!p.addEventListener&&p.attachEvent&&p.fireEvent&&(p.attachEvent("onclick",h=function(){t.noCloneEvent=!1}),p.cloneNode(!0).fireEvent("onclick"),p.detachEvent("onclick",h)),u=i.createElement("input"),u.value="t",u.setAttribute("type","radio"),t.radioValue=u.value==="t",u.setAttribute("checked","checked"),u.setAttribute("name","t"),p.appendChild(u),a=i.createDocumentFragment(),a.appendChild(p.lastChild),t.checkClone=a.cloneNode(!0).cloneNode(!0).lastChild.checked,t.appendChecked=u.checked,a.removeChild(u),a.appendChild(p);if(p.attachEvent)for(l in{submit:!0,change:!0,focusin:!0})f="on"+l,c=f in p,c||(p.setAttribute(f,"return;"),c=typeof p[f]=="function"),t[l+"Bubbles"]=c;return v(function(){var n,r,s,o,u="padding:0;margin:0;border:0;display:block;overflow:hidden;",a=i.getElementsByTagName("body")[0];if(!a)return;n=i.createElement("div"),n.style.cssText="visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px",a.insertBefore(n,a.firstChild),r=i.createElement("div"),n.appendChild(r),r.innerHTML="<table><tr><td></td><td>t</td></tr></table>",s=r.getElementsByTagName("td"),s[0].style.cssText="padding:0;margin:0;border:0;display:none",c=s[0].offsetHeight===0,s[0].style.display="",s[1].style.display="none",t.reliableHiddenOffsets=c&&s[0].offsetHeight===0,r.innerHTML="",r.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",t.boxSizing=r.offsetWidth===4,t.doesNotIncludeMarginInBodyOffset=a.offsetTop!==1,e.getComputedStyle&&(t.pixelPosition=(e.getComputedStyle(r,null)||{}).top!=="1%",t.boxSizingReliable=(e.getComputedStyle(r,null)||{width:"4px"}).width==="4px",o=i.createElement("div"),o.style.cssText=r.style.cssText=u,o.style.marginRight=o.style.width="0",r.style.width="1px",r.appendChild(o),t.reliableMarginRight=!parseFloat((e.getComputedStyle(o,null)||{}).marginRight)),typeof r.style.zoom!="undefined"&&(r.innerHTML="",r.style.cssText=u+"width:1px;padding:1px;display:inline;zoom:1",t.inlineBlockNeedsLayout=r.offsetWidth===3,r.style.display="block",r.style.overflow="visible",r.innerHTML="<div></div>",r.firstChild.style.width="5px",t.shrinkWrapBlocks=r.offsetWidth!==3,n.style.zoom=1),a.removeChild(n),n=r=s=o=null}),a.removeChild(p),n=r=s=o=u=a=p=null,t}();var D=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,P=/([A-Z])/g;v.extend({cache:{},deletedIds:[],uuid:0,expando:"jQuery"+(v.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(e){return e=e.nodeType?v.cache[e[v.expando]]:e[v.expando],!!e&&!B(e)},data:function(e,n,r,i){if(!v.acceptData(e))return;var s,o,u=v.expando,a=typeof n=="string",f=e.nodeType,l=f?v.cache:e,c=f?e[u]:e[u]&&u;if((!c||!l[c]||!i&&!l[c].data)&&a&&r===t)return;c||(f?e[u]=c=v.deletedIds.pop()||v.guid++:c=u),l[c]||(l[c]={},f||(l[c].toJSON=v.noop));if(typeof n=="object"||typeof n=="function")i?l[c]=v.extend(l[c],n):l[c].data=v.extend(l[c].data,n);return s=l[c],i||(s.data||(s.data={}),s=s.data),r!==t&&(s[v.camelCase(n)]=r),a?(o=s[n],o==null&&(o=s[v.camelCase(n)])):o=s,o},removeData:function(e,t,n){if(!v.acceptData(e))return;var r,i,s,o=e.nodeType,u=o?v.cache:e,a=o?e[v.expando]:v.expando;if(!u[a])return;if(t){r=n?u[a]:u[a].data;if(r){v.isArray(t)||(t in r?t=[t]:(t=v.camelCase(t),t in r?t=[t]:t=t.split(" ")));for(i=0,s=t.length;i<s;i++)delete r[t[i]];if(!(n?B:v.isEmptyObject)(r))return}}if(!n){delete 
u[a].data;if(!B(u[a]))return}o?v.cleanData([e],!0):v.support.deleteExpando||u!=u.window?delete u[a]:u[a]=null},_data:function(e,t,n){return v.data(e,t,n,!0)},acceptData:function(e){var t=e.nodeName&&v.noData[e.nodeName.toLowerCase()];return!t||t!==!0&&e.getAttribute("classid")===t}}),v.fn.extend({data:function(e,n){var r,i,s,o,u,a=this[0],f=0,l=null;if(e===t){if(this.length){l=v.data(a);if(a.nodeType===1&&!v._data(a,"parsedAttrs")){s=a.attributes;for(u=s.length;f<u;f++)o=s[f].name,o.indexOf("data-")||(o=v.camelCase(o.substring(5)),H(a,o,l[o]));v._data(a,"parsedAttrs",!0)}}return l}return typeof e=="object"?this.each(function(){v.data(this,e)}):(r=e.split(".",2),r[1]=r[1]?"."+r[1]:"",i=r[1]+"!",v.access(this,function(n){if(n===t)return l=this.triggerHandler("getData"+i,[r[0]]),l===t&&a&&(l=v.data(a,e),l=H(a,e,l)),l===t&&r[1]?this.data(r[0]):l;r[1]=n,this.each(function(){var t=v(this);t.triggerHandler("setData"+i,r),v.data(this,e,n),t.triggerHandler("changeData"+i,r)})},null,n,arguments.length>1,null,!1))},removeData:function(e){return this.each(function(){v.removeData(this,e)})}}),v.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=v._data(e,t),n&&(!r||v.isArray(n)?r=v._data(e,t,v.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=v.queue(e,t),r=n.length,i=n.shift(),s=v._queueHooks(e,t),o=function(){v.dequeue(e,t)};i==="inprogress"&&(i=n.shift(),r--),i&&(t==="fx"&&n.unshift("inprogress"),delete s.stop,i.call(e,o,s)),!r&&s&&s.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return v._data(e,n)||v._data(e,n,{empty:v.Callbacks("once memory").add(function(){v.removeData(e,t+"queue",!0),v.removeData(e,n,!0)})})}}),v.fn.extend({queue:function(e,n){var r=2;return typeof e!="string"&&(n=e,e="fx",r--),arguments.length<r?v.queue(this[0],e):n===t?this:this.each(function(){var t=v.queue(this,e,n);v._queueHooks(this,e),e==="fx"&&t[0]!=="inprogress"&&v.dequeue(this,e)})},dequeue:function(e){return this.each(function(){v.dequeue(this,e)})},delay:function(e,t){return e=v.fx?v.fx.speeds[e]||e:e,t=t||"fx",this.queue(t,function(t,n){var r=setTimeout(t,e);n.stop=function(){clearTimeout(r)}})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,n){var r,i=1,s=v.Deferred(),o=this,u=this.length,a=function(){--i||s.resolveWith(o,[o])};typeof e!="string"&&(n=e,e=t),e=e||"fx";while(u--)r=v._data(o[u],e+"queueHooks"),r&&r.empty&&(i++,r.empty.add(a));return a(),s.promise(n)}});var j,F,I,q=/[\t\r\n]/g,R=/\r/g,U=/^(?:button|input)$/i,z=/^(?:button|input|object|select|textarea)$/i,W=/^a(?:rea|)$/i,X=/^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i,V=v.support.getSetAttribute;v.fn.extend({attr:function(e,t){return v.access(this,v.attr,e,t,arguments.length>1)},removeAttr:function(e){return this.each(function(){v.removeAttr(this,e)})},prop:function(e,t){return v.access(this,v.prop,e,t,arguments.length>1)},removeProp:function(e){return e=v.propFix[e]||e,this.each(function(){try{this[e]=t,delete this[e]}catch(n){}})},addClass:function(e){var t,n,r,i,s,o,u;if(v.isFunction(e))return this.each(function(t){v(this).addClass(e.call(this,t,this.className))});if(e&&typeof e=="string"){t=e.split(y);for(n=0,r=this.length;n<r;n++){i=this[n];if(i.nodeType===1)if(!i.className&&t.length===1)i.className=e;else{s=" "+i.className+" ";for(o=0,u=t.length;o<u;o++)s.indexOf(" "+t[o]+" ")<0&&(s+=t[o]+" ");i.className=v.trim(s)}}}return this},removeClass:function(e){var 
n,r,i,s,o,u,a;if(v.isFunction(e))return this.each(function(t){v(this).removeClass(e.call(this,t,this.className))});if(e&&typeof e=="string"||e===t){n=(e||"").split(y);for(u=0,a=this.length;u<a;u++){i=this[u];if(i.nodeType===1&&i.className){r=(" "+i.className+" ").replace(q," ");for(s=0,o=n.length;s<o;s++)while(r.indexOf(" "+n[s]+" ")>=0)r=r.replace(" "+n[s]+" "," ");i.className=e?v.trim(r):""}}}return this},toggleClass:function(e,t){var n=typeof e,r=typeof t=="boolean";return v.isFunction(e)?this.each(function(n){v(this).toggleClass(e.call(this,n,this.className,t),t)}):this.each(function(){if(n==="string"){var i,s=0,o=v(this),u=t,a=e.split(y);while(i=a[s++])u=r?u:!o.hasClass(i),o[u?"addClass":"removeClass"](i)}else if(n==="undefined"||n==="boolean")this.className&&v._data(this,"__className__",this.className),this.className=this.className||e===!1?"":v._data(this,"__className__")||""})},hasClass:function(e){var t=" "+e+" ",n=0,r=this.length;for(;n<r;n++)if(this[n].nodeType===1&&(" "+this[n].className+" ").replace(q," ").indexOf(t)>=0)return!0;return!1},val:function(e){var n,r,i,s=this[0];if(!arguments.length){if(s)return n=v.valHooks[s.type]||v.valHooks[s.nodeName.toLowerCase()],n&&"get"in n&&(r=n.get(s,"value"))!==t?r:(r=s.value,typeof r=="string"?r.replace(R,""):r==null?"":r);return}return i=v.isFunction(e),this.each(function(r){var s,o=v(this);if(this.nodeType!==1)return;i?s=e.call(this,r,o.val()):s=e,s==null?s="":typeof s=="number"?s+="":v.isArray(s)&&(s=v.map(s,function(e){return e==null?"":e+""})),n=v.valHooks[this.type]||v.valHooks[this.nodeName.toLowerCase()];if(!n||!("set"in n)||n.set(this,s,"value")===t)this.value=s})}}),v.extend({valHooks:{option:{get:function(e){var t=e.attributes.value;return!t||t.specified?e.value:e.text}},select:{get:function(e){var t,n,r=e.options,i=e.selectedIndex,s=e.type==="select-one"||i<0,o=s?null:[],u=s?i+1:r.length,a=i<0?u:s?i:0;for(;a<u;a++){n=r[a];if((n.selected||a===i)&&(v.support.optDisabled?!n.disabled:n.getAttribute("disabled")===null)&&(!n.parentNode.disabled||!v.nodeName(n.parentNode,"optgroup"))){t=v(n).val();if(s)return t;o.push(t)}}return o},set:function(e,t){var n=v.makeArray(t);return v(e).find("option").each(function(){this.selected=v.inArray(v(this).val(),n)>=0}),n.length||(e.selectedIndex=-1),n}}},attrFn:{},attr:function(e,n,r,i){var s,o,u,a=e.nodeType;if(!e||a===3||a===8||a===2)return;if(i&&v.isFunction(v.fn[n]))return v(e)[n](r);if(typeof e.getAttribute=="undefined")return v.prop(e,n,r);u=a!==1||!v.isXMLDoc(e),u&&(n=n.toLowerCase(),o=v.attrHooks[n]||(X.test(n)?F:j));if(r!==t){if(r===null){v.removeAttr(e,n);return}return o&&"set"in o&&u&&(s=o.set(e,r,n))!==t?s:(e.setAttribute(n,r+""),r)}return o&&"get"in o&&u&&(s=o.get(e,n))!==null?s:(s=e.getAttribute(n),s===null?t:s)},removeAttr:function(e,t){var n,r,i,s,o=0;if(t&&e.nodeType===1){r=t.split(y);for(;o<r.length;o++)i=r[o],i&&(n=v.propFix[i]||i,s=X.test(i),s||v.attr(e,i,""),e.removeAttribute(V?i:n),s&&n in e&&(e[n]=!1))}},attrHooks:{type:{set:function(e,t){if(U.test(e.nodeName)&&e.parentNode)v.error("type property can't be changed");else if(!v.support.radioValue&&t==="radio"&&v.nodeName(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}},value:{get:function(e,t){return j&&v.nodeName(e,"button")?j.get(e,t):t in e?e.value:null},set:function(e,t,n){if(j&&v.nodeName(e,"button"))return 
j.set(e,t,n);e.value=t}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(e,n,r){var i,s,o,u=e.nodeType;if(!e||u===3||u===8||u===2)return;return o=u!==1||!v.isXMLDoc(e),o&&(n=v.propFix[n]||n,s=v.propHooks[n]),r!==t?s&&"set"in s&&(i=s.set(e,r,n))!==t?i:e[n]=r:s&&"get"in s&&(i=s.get(e,n))!==null?i:e[n]},propHooks:{tabIndex:{get:function(e){var n=e.getAttributeNode("tabindex");return n&&n.specified?parseInt(n.value,10):z.test(e.nodeName)||W.test(e.nodeName)&&e.href?0:t}}}}),F={get:function(e,n){var r,i=v.prop(e,n);return i===!0||typeof i!="boolean"&&(r=e.getAttributeNode(n))&&r.nodeValue!==!1?n.toLowerCase():t},set:function(e,t,n){var r;return t===!1?v.removeAttr(e,n):(r=v.propFix[n]||n,r in e&&(e[r]=!0),e.setAttribute(n,n.toLowerCase())),n}},V||(I={name:!0,id:!0,coords:!0},j=v.valHooks.button={get:function(e,n){var r;return r=e.getAttributeNode(n),r&&(I[n]?r.value!=="":r.specified)?r.value:t},set:function(e,t,n){var r=e.getAttributeNode(n);return r||(r=i.createAttribute(n),e.setAttributeNode(r)),r.value=t+""}},v.each(["width","height"],function(e,t){v.attrHooks[t]=v.extend(v.attrHooks[t],{set:function(e,n){if(n==="")return e.setAttribute(t,"auto"),n}})}),v.attrHooks.contenteditable={get:j.get,set:function(e,t,n){t===""&&(t="false"),j.set(e,t,n)}}),v.support.hrefNormalized||v.each(["href","src","width","height"],function(e,n){v.attrHooks[n]=v.extend(v.attrHooks[n],{get:function(e){var r=e.getAttribute(n,2);return r===null?t:r}})}),v.support.style||(v.attrHooks.style={get:function(e){return e.style.cssText.toLowerCase()||t},set:function(e,t){return e.style.cssText=t+""}}),v.support.optSelected||(v.propHooks.selected=v.extend(v.propHooks.selected,{get:function(e){var t=e.parentNode;return t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex),null}})),v.support.enctype||(v.propFix.enctype="encoding"),v.support.checkOn||v.each(["radio","checkbox"],function(){v.valHooks[this]={get:function(e){return e.getAttribute("value")===null?"on":e.value}}}),v.each(["radio","checkbox"],function(){v.valHooks[this]=v.extend(v.valHooks[this],{set:function(e,t){if(v.isArray(t))return e.checked=v.inArray(v(e).val(),t)>=0}})});var $=/^(?:textarea|input|select)$/i,J=/^([^\.]*|)(?:\.(.+)|)$/,K=/(?:^|\s)hover(\.\S+|)\b/,Q=/^key/,G=/^(?:mouse|contextmenu)|click/,Y=/^(?:focusinfocus|focusoutblur)$/,Z=function(e){return v.event.special.hover?e:e.replace(K,"mouseenter$1 mouseleave$1")};v.event={add:function(e,n,r,i,s){var o,u,a,f,l,c,h,p,d,m,g;if(e.nodeType===3||e.nodeType===8||!n||!r||!(o=v._data(e)))return;r.handler&&(d=r,r=d.handler,s=d.selector),r.guid||(r.guid=v.guid++),a=o.events,a||(o.events=a={}),u=o.handle,u||(o.handle=u=function(e){return typeof v=="undefined"||!!e&&v.event.triggered===e.type?t:v.event.dispatch.apply(u.elem,arguments)},u.elem=e),n=v.trim(Z(n)).split(" 
");for(f=0;f<n.length;f++){l=J.exec(n[f])||[],c=l[1],h=(l[2]||"").split(".").sort(),g=v.event.special[c]||{},c=(s?g.delegateType:g.bindType)||c,g=v.event.special[c]||{},p=v.extend({type:c,origType:l[1],data:i,handler:r,guid:r.guid,selector:s,needsContext:s&&v.expr.match.needsContext.test(s),namespace:h.join(".")},d),m=a[c];if(!m){m=a[c]=[],m.delegateCount=0;if(!g.setup||g.setup.call(e,i,h,u)===!1)e.addEventListener?e.addEventListener(c,u,!1):e.attachEvent&&e.attachEvent("on"+c,u)}g.add&&(g.add.call(e,p),p.handler.guid||(p.handler.guid=r.guid)),s?m.splice(m.delegateCount++,0,p):m.push(p),v.event.global[c]=!0}e=null},global:{},remove:function(e,t,n,r,i){var s,o,u,a,f,l,c,h,p,d,m,g=v.hasData(e)&&v._data(e);if(!g||!(h=g.events))return;t=v.trim(Z(t||"")).split(" ");for(s=0;s<t.length;s++){o=J.exec(t[s])||[],u=a=o[1],f=o[2];if(!u){for(u in h)v.event.remove(e,u+t[s],n,r,!0);continue}p=v.event.special[u]||{},u=(r?p.delegateType:p.bindType)||u,d=h[u]||[],l=d.length,f=f?new RegExp("(^|\\.)"+f.split(".").sort().join("\\.(?:.*\\.|)")+"(\\.|$)"):null;for(c=0;c<d.length;c++)m=d[c],(i||a===m.origType)&&(!n||n.guid===m.guid)&&(!f||f.test(m.namespace))&&(!r||r===m.selector||r==="**"&&m.selector)&&(d.splice(c--,1),m.selector&&d.delegateCount--,p.remove&&p.remove.call(e,m));d.length===0&&l!==d.length&&((!p.teardown||p.teardown.call(e,f,g.handle)===!1)&&v.removeEvent(e,u,g.handle),delete h[u])}v.isEmptyObject(h)&&(delete g.handle,v.removeData(e,"events",!0))},customEvent:{getData:!0,setData:!0,changeData:!0},trigger:function(n,r,s,o){if(!s||s.nodeType!==3&&s.nodeType!==8){var u,a,f,l,c,h,p,d,m,g,y=n.type||n,b=[];if(Y.test(y+v.event.triggered))return;y.indexOf("!")>=0&&(y=y.slice(0,-1),a=!0),y.indexOf(".")>=0&&(b=y.split("."),y=b.shift(),b.sort());if((!s||v.event.customEvent[y])&&!v.event.global[y])return;n=typeof n=="object"?n[v.expando]?n:new v.Event(y,n):new v.Event(y),n.type=y,n.isTrigger=!0,n.exclusive=a,n.namespace=b.join("."),n.namespace_re=n.namespace?new RegExp("(^|\\.)"+b.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,h=y.indexOf(":")<0?"on"+y:"";if(!s){u=v.cache;for(f in u)u[f].events&&u[f].events[y]&&v.event.trigger(n,r,u[f].handle.elem,!0);return}n.result=t,n.target||(n.target=s),r=r!=null?v.makeArray(r):[],r.unshift(n),p=v.event.special[y]||{};if(p.trigger&&p.trigger.apply(s,r)===!1)return;m=[[s,p.bindType||y]];if(!o&&!p.noBubble&&!v.isWindow(s)){g=p.delegateType||y,l=Y.test(g+y)?s:s.parentNode;for(c=s;l;l=l.parentNode)m.push([l,g]),c=l;c===(s.ownerDocument||i)&&m.push([c.defaultView||c.parentWindow||e,g])}for(f=0;f<m.length&&!n.isPropagationStopped();f++)l=m[f][0],n.type=m[f][1],d=(v._data(l,"events")||{})[n.type]&&v._data(l,"handle"),d&&d.apply(l,r),d=h&&l[h],d&&v.acceptData(l)&&d.apply&&d.apply(l,r)===!1&&n.preventDefault();return n.type=y,!o&&!n.isDefaultPrevented()&&(!p._default||p._default.apply(s.ownerDocument,r)===!1)&&(y!=="click"||!v.nodeName(s,"a"))&&v.acceptData(s)&&h&&s[y]&&(y!=="focus"&&y!=="blur"||n.target.offsetWidth!==0)&&!v.isWindow(s)&&(c=s[h],c&&(s[h]=null),v.event.triggered=y,s[y](),v.event.triggered=t,c&&(s[h]=c)),n.result}return},dispatch:function(n){n=v.event.fix(n||e.event);var 
r,i,s,o,u,a,f,c,h,p,d=(v._data(this,"events")||{})[n.type]||[],m=d.delegateCount,g=l.call(arguments),y=!n.exclusive&&!n.namespace,b=v.event.special[n.type]||{},w=[];g[0]=n,n.delegateTarget=this;if(b.preDispatch&&b.preDispatch.call(this,n)===!1)return;if(m&&(!n.button||n.type!=="click"))for(s=n.target;s!=this;s=s.parentNode||this)if(s.disabled!==!0||n.type!=="click"){u={},f=[];for(r=0;r<m;r++)c=d[r],h=c.selector,u[h]===t&&(u[h]=c.needsContext?v(h,this).index(s)>=0:v.find(h,this,null,[s]).length),u[h]&&f.push(c);f.length&&w.push({elem:s,matches:f})}d.length>m&&w.push({elem:this,matches:d.slice(m)});for(r=0;r<w.length&&!n.isPropagationStopped();r++){a=w[r],n.currentTarget=a.elem;for(i=0;i<a.matches.length&&!n.isImmediatePropagationStopped();i++){c=a.matches[i];if(y||!n.namespace&&!c.namespace||n.namespace_re&&n.namespace_re.test(c.namespace))n.data=c.data,n.handleObj=c,o=((v.event.special[c.origType]||{}).handle||c.handler).apply(a.elem,g),o!==t&&(n.result=o,o===!1&&(n.preventDefault(),n.stopPropagation()))}}return b.postDispatch&&b.postDispatch.call(this,n),n.result},props:"attrChange attrName relatedNode srcElement altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(e,t){return e.which==null&&(e.which=t.charCode!=null?t.charCode:t.keyCode),e}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(e,n){var r,s,o,u=n.button,a=n.fromElement;return e.pageX==null&&n.clientX!=null&&(r=e.target.ownerDocument||i,s=r.documentElement,o=r.body,e.pageX=n.clientX+(s&&s.scrollLeft||o&&o.scrollLeft||0)-(s&&s.clientLeft||o&&o.clientLeft||0),e.pageY=n.clientY+(s&&s.scrollTop||o&&o.scrollTop||0)-(s&&s.clientTop||o&&o.clientTop||0)),!e.relatedTarget&&a&&(e.relatedTarget=a===e.target?n.toElement:a),!e.which&&u!==t&&(e.which=u&1?1:u&2?3:u&4?2:0),e}},fix:function(e){if(e[v.expando])return e;var t,n,r=e,s=v.event.fixHooks[e.type]||{},o=s.props?this.props.concat(s.props):this.props;e=v.Event(r);for(t=o.length;t;)n=o[--t],e[n]=r[n];return e.target||(e.target=r.srcElement||i),e.target.nodeType===3&&(e.target=e.target.parentNode),e.metaKey=!!e.metaKey,s.filter?s.filter(e,r):e},special:{load:{noBubble:!0},focus:{delegateType:"focusin"},blur:{delegateType:"focusout"},beforeunload:{setup:function(e,t,n){v.isWindow(this)&&(this.onbeforeunload=n)},teardown:function(e,t){this.onbeforeunload===t&&(this.onbeforeunload=null)}}},simulate:function(e,t,n,r){var i=v.extend(new v.Event,n,{type:e,isSimulated:!0,originalEvent:{}});r?v.event.trigger(i,null,t):v.event.dispatch.call(t,i),i.isDefaultPrevented()&&n.preventDefault()}},v.event.handle=v.event.dispatch,v.removeEvent=i.removeEventListener?function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n,!1)}:function(e,t,n){var r="on"+t;e.detachEvent&&(typeof e[r]=="undefined"&&(e[r]=null),e.detachEvent(r,n))},v.Event=function(e,t){if(!(this instanceof v.Event))return new v.Event(e,t);e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||e.returnValue===!1||e.getPreventDefault&&e.getPreventDefault()?tt:et):this.type=e,t&&v.extend(this,t),this.timeStamp=e&&e.timeStamp||v.now(),this[v.expando]=!0},v.Event.prototype={preventDefault:function(){this.isDefaultPrevented=tt;var 
e=this.originalEvent;if(!e)return;e.preventDefault?e.preventDefault():e.returnValue=!1},stopPropagation:function(){this.isPropagationStopped=tt;var e=this.originalEvent;if(!e)return;e.stopPropagation&&e.stopPropagation(),e.cancelBubble=!0},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=tt,this.stopPropagation()},isDefaultPrevented:et,isPropagationStopped:et,isImmediatePropagationStopped:et},v.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(e,t){v.event.special[e]={delegateType:t,bindType:t,handle:function(e){var n,r=this,i=e.relatedTarget,s=e.handleObj,o=s.selector;if(!i||i!==r&&!v.contains(r,i))e.type=s.origType,n=s.handler.apply(this,arguments),e.type=t;return n}}}),v.support.submitBubbles||(v.event.special.submit={setup:function(){if(v.nodeName(this,"form"))return!1;v.event.add(this,"click._submit keypress._submit",function(e){var n=e.target,r=v.nodeName(n,"input")||v.nodeName(n,"button")?n.form:t;r&&!v._data(r,"_submit_attached")&&(v.event.add(r,"submit._submit",function(e){e._submit_bubble=!0}),v._data(r,"_submit_attached",!0))})},postDispatch:function(e){e._submit_bubble&&(delete e._submit_bubble,this.parentNode&&!e.isTrigger&&v.event.simulate("submit",this.parentNode,e,!0))},teardown:function(){if(v.nodeName(this,"form"))return!1;v.event.remove(this,"._submit")}}),v.support.changeBubbles||(v.event.special.change={setup:function(){if($.test(this.nodeName)){if(this.type==="checkbox"||this.type==="radio")v.event.add(this,"propertychange._change",function(e){e.originalEvent.propertyName==="checked"&&(this._just_changed=!0)}),v.event.add(this,"click._change",function(e){this._just_changed&&!e.isTrigger&&(this._just_changed=!1),v.event.simulate("change",this,e,!0)});return!1}v.event.add(this,"beforeactivate._change",function(e){var t=e.target;$.test(t.nodeName)&&!v._data(t,"_change_attached")&&(v.event.add(t,"change._change",function(e){this.parentNode&&!e.isSimulated&&!e.isTrigger&&v.event.simulate("change",this.parentNode,e,!0)}),v._data(t,"_change_attached",!0))})},handle:function(e){var t=e.target;if(this!==t||e.isSimulated||e.isTrigger||t.type!=="radio"&&t.type!=="checkbox")return e.handleObj.handler.apply(this,arguments)},teardown:function(){return v.event.remove(this,"._change"),!$.test(this.nodeName)}}),v.support.focusinBubbles||v.each({focus:"focusin",blur:"focusout"},function(e,t){var n=0,r=function(e){v.event.simulate(t,e.target,v.event.fix(e),!0)};v.event.special[t]={setup:function(){n++===0&&i.addEventListener(e,r,!0)},teardown:function(){--n===0&&i.removeEventListener(e,r,!0)}}}),v.fn.extend({on:function(e,n,r,i,s){var o,u;if(typeof e=="object"){typeof n!="string"&&(r=r||n,n=t);for(u in e)this.on(u,n,r,e[u],s);return this}r==null&&i==null?(i=n,r=n=t):i==null&&(typeof n=="string"?(i=r,r=t):(i=r,r=n,n=t));if(i===!1)i=et;else if(!i)return this;return s===1&&(o=i,i=function(e){return v().off(e),o.apply(this,arguments)},i.guid=o.guid||(o.guid=v.guid++)),this.each(function(){v.event.add(this,e,i,r,n)})},one:function(e,t,n,r){return this.on(e,t,n,r,1)},off:function(e,n,r){var i,s;if(e&&e.preventDefault&&e.handleObj)return i=e.handleObj,v(e.delegateTarget).off(i.namespace?i.origType+"."+i.namespace:i.origType,i.selector,i.handler),this;if(typeof e=="object"){for(s in e)this.off(s,n,e[s]);return this}if(n===!1||typeof n=="function")r=n,n=t;return r===!1&&(r=et),this.each(function(){v.event.remove(this,e,r,n)})},bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},live:function(e,t,n){return 
v(this.context).on(e,this.selector,t,n),this},die:function(e,t){return v(this.context).off(e,this.selector||"**",t),this},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return arguments.length===1?this.off(e,"**"):this.off(t,e||"**",n)},trigger:function(e,t){return this.each(function(){v.event.trigger(e,t,this)})},triggerHandler:function(e,t){if(this[0])return v.event.trigger(e,t,this[0],!0)},toggle:function(e){var t=arguments,n=e.guid||v.guid++,r=0,i=function(n){var i=(v._data(this,"lastToggle"+e.guid)||0)%r;return v._data(this,"lastToggle"+e.guid,i+1),n.preventDefault(),t[i].apply(this,arguments)||!1};i.guid=n;while(r<t.length)t[r++].guid=n;return this.click(i)},hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),v.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(e,t){v.fn[t]=function(e,n){return n==null&&(n=e,e=null),arguments.length>0?this.on(t,null,e,n):this.trigger(t)},Q.test(t)&&(v.event.fixHooks[t]=v.event.keyHooks),G.test(t)&&(v.event.fixHooks[t]=v.event.mouseHooks)}),function(e,t){function nt(e,t,n,r){n=n||[],t=t||g;var i,s,a,f,l=t.nodeType;if(!e||typeof e!="string")return n;if(l!==1&&l!==9)return[];a=o(t);if(!a&&!r)if(i=R.exec(e))if(f=i[1]){if(l===9){s=t.getElementById(f);if(!s||!s.parentNode)return n;if(s.id===f)return n.push(s),n}else if(t.ownerDocument&&(s=t.ownerDocument.getElementById(f))&&u(t,s)&&s.id===f)return n.push(s),n}else{if(i[2])return S.apply(n,x.call(t.getElementsByTagName(e),0)),n;if((f=i[3])&&Z&&t.getElementsByClassName)return S.apply(n,x.call(t.getElementsByClassName(f),0)),n}return vt(e.replace(j,"$1"),t,n,r,a)}function rt(e){return function(t){var n=t.nodeName.toLowerCase();return n==="input"&&t.type===e}}function it(e){return function(t){var n=t.nodeName.toLowerCase();return(n==="input"||n==="button")&&t.type===e}}function st(e){return N(function(t){return t=+t,N(function(n,r){var i,s=e([],n.length,t),o=s.length;while(o--)n[i=s[o]]&&(n[i]=!(r[i]=n[i]))})})}function ot(e,t,n){if(e===t)return n;var r=e.nextSibling;while(r){if(r===t)return-1;r=r.nextSibling}return 1}function ut(e,t){var n,r,s,o,u,a,f,l=L[d][e+" "];if(l)return t?0:l.slice(0);u=e,a=[],f=i.preFilter;while(u){if(!n||(r=F.exec(u)))r&&(u=u.slice(r[0].length)||u),a.push(s=[]);n=!1;if(r=I.exec(u))s.push(n=new m(r.shift())),u=u.slice(n.length),n.type=r[0].replace(j," ");for(o in i.filter)(r=J[o].exec(u))&&(!f[o]||(r=f[o](r)))&&(s.push(n=new m(r.shift())),u=u.slice(n.length),n.type=o,n.matches=r);if(!n)break}return t?u.length:u?nt.error(e):L(e,a).slice(0)}function at(e,t,r){var i=t.dir,s=r&&t.dir==="parentNode",o=w++;return t.first?function(t,n,r){while(t=t[i])if(s||t.nodeType===1)return e(t,n,r)}:function(t,r,u){if(!u){var a,f=b+" "+o+" ",l=f+n;while(t=t[i])if(s||t.nodeType===1){if((a=t[d])===l)return t.sizset;if(typeof a=="string"&&a.indexOf(f)===0){if(t.sizset)return t}else{t[d]=l;if(e(t,r,u))return t.sizset=!0,t;t.sizset=!1}}}else while(t=t[i])if(s||t.nodeType===1)if(e(t,r,u))return t}}function ft(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function lt(e,t,n,r,i){var s,o=[],u=0,a=e.length,f=t!=null;for(;u<a;u++)if(s=e[u])if(!n||n(s,r,i))o.push(s),f&&t.push(u);return o}function ct(e,t,n,r,i,s){return r&&!r[d]&&(r=ct(r)),i&&!i[d]&&(i=ct(i,s)),N(function(s,o,u,a){var 
f,l,c,h=[],p=[],d=o.length,v=s||dt(t||"*",u.nodeType?[u]:u,[]),m=e&&(s||!t)?lt(v,h,e,u,a):v,g=n?i||(s?e:d||r)?[]:o:m;n&&n(m,g,u,a);if(r){f=lt(g,p),r(f,[],u,a),l=f.length;while(l--)if(c=f[l])g[p[l]]=!(m[p[l]]=c)}if(s){if(i||e){if(i){f=[],l=g.length;while(l--)(c=g[l])&&f.push(m[l]=c);i(null,g=[],f,a)}l=g.length;while(l--)(c=g[l])&&(f=i?T.call(s,c):h[l])>-1&&(s[f]=!(o[f]=c))}}else g=lt(g===o?g.splice(d,g.length):g),i?i(null,o,g,a):S.apply(o,g)})}function ht(e){var t,n,r,s=e.length,o=i.relative[e[0].type],u=o||i.relative[" "],a=o?1:0,f=at(function(e){return e===t},u,!0),l=at(function(e){return T.call(t,e)>-1},u,!0),h=[function(e,n,r){return!o&&(r||n!==c)||((t=n).nodeType?f(e,n,r):l(e,n,r))}];for(;a<s;a++)if(n=i.relative[e[a].type])h=[at(ft(h),n)];else{n=i.filter[e[a].type].apply(null,e[a].matches);if(n[d]){r=++a;for(;r<s;r++)if(i.relative[e[r].type])break;return ct(a>1&&ft(h),a>1&&e.slice(0,a-1).join("").replace(j,"$1"),n,a<r&&ht(e.slice(a,r)),r<s&&ht(e=e.slice(r)),r<s&&e.join(""))}h.push(n)}return ft(h)}function pt(e,t){var r=t.length>0,s=e.length>0,o=function(u,a,f,l,h){var p,d,v,m=[],y=0,w="0",x=u&&[],T=h!=null,N=c,C=u||s&&i.find.TAG("*",h&&a.parentNode||a),k=b+=N==null?1:Math.E;T&&(c=a!==g&&a,n=o.el);for(;(p=C[w])!=null;w++){if(s&&p){for(d=0;v=e[d];d++)if(v(p,a,f)){l.push(p);break}T&&(b=k,n=++o.el)}r&&((p=!v&&p)&&y--,u&&x.push(p))}y+=w;if(r&&w!==y){for(d=0;v=t[d];d++)v(x,m,a,f);if(u){if(y>0)while(w--)!x[w]&&!m[w]&&(m[w]=E.call(l));m=lt(m)}S.apply(l,m),T&&!u&&m.length>0&&y+t.length>1&&nt.uniqueSort(l)}return T&&(b=k,c=N),x};return o.el=0,r?N(o):o}function dt(e,t,n){var r=0,i=t.length;for(;r<i;r++)nt(e,t[r],n);return n}function vt(e,t,n,r,s){var o,u,f,l,c,h=ut(e),p=h.length;if(!r&&h.length===1){u=h[0]=h[0].slice(0);if(u.length>2&&(f=u[0]).type==="ID"&&t.nodeType===9&&!s&&i.relative[u[1].type]){t=i.find.ID(f.matches[0].replace($,""),t,s)[0];if(!t)return n;e=e.slice(u.shift().length)}for(o=J.POS.test(e)?-1:u.length-1;o>=0;o--){f=u[o];if(i.relative[l=f.type])break;if(c=i.find[l])if(r=c(f.matches[0].replace($,""),z.test(u[0].type)&&t.parentNode||t,s)){u.splice(o,1),e=r.length&&u.join("");if(!e)return S.apply(n,x.call(r,0)),n;break}}}return a(e,h)(r,t,s,n,z.test(e)),n}function mt(){}var n,r,i,s,o,u,a,f,l,c,h=!0,p="undefined",d=("sizcache"+Math.random()).replace(".",""),m=String,g=e.document,y=g.documentElement,b=0,w=0,E=[].pop,S=[].push,x=[].slice,T=[].indexOf||function(e){var t=0,n=this.length;for(;t<n;t++)if(this[t]===e)return t;return-1},N=function(e,t){return e[d]=t==null||t,e},C=function(){var e={},t=[];return N(function(n,r){return t.push(n)>i.cacheLength&&delete e[t.shift()],e[n+" "]=r},e)},k=C(),L=C(),A=C(),O="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+",_=M.replace("w","w#"),D="([*^$|!~]?=)",P="\\["+O+"*("+M+")"+O+"*(?:"+D+O+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+_+")|)|)"+O+"*\\]",H=":("+M+")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:"+P+")|[^:]|\\\\.)*|.*))\\)|)",B=":(even|odd|eq|gt|lt|nth|first|last)(?:\\("+O+"*((?:-\\d)?\\d*)"+O+"*\\)|)(?=[^-]|$)",j=new RegExp("^"+O+"+|((?:^|[^\\\\])(?:\\\\.)*)"+O+"+$","g"),F=new RegExp("^"+O+"*,"+O+"*"),I=new RegExp("^"+O+"*([\\x20\\t\\r\\n\\f>+~])"+O+"*"),q=new RegExp(H),R=/^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/,U=/^:not/,z=/[\x20\t\r\n\f]*[+~]/,W=/:not\($/,X=/h\d/i,V=/input|select|textarea|button/i,$=/\\(?!\\)/g,J={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),NAME:new RegExp("^\\[name=['\"]?("+M+")['\"]?\\]"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+P),PSEUDO:new 
RegExp("^"+H),POS:new RegExp(B,"i"),CHILD:new RegExp("^:(only|nth|first|last)-child(?:\\("+O+"*(even|odd|(([+-]|)(\\d*)n|)"+O+"*(?:([+-]|)"+O+"*(\\d+)|))"+O+"*\\)|)","i"),needsContext:new RegExp("^"+O+"*[>+~]|"+B,"i")},K=function(e){var t=g.createElement("div");try{return e(t)}catch(n){return!1}finally{t=null}},Q=K(function(e){return e.appendChild(g.createComment("")),!e.getElementsByTagName("*").length}),G=K(function(e){return e.innerHTML="<a href='#'></a>",e.firstChild&&typeof e.firstChild.getAttribute!==p&&e.firstChild.getAttribute("href")==="#"}),Y=K(function(e){e.innerHTML="<select></select>";var t=typeof e.lastChild.getAttribute("multiple");return t!=="boolean"&&t!=="string"}),Z=K(function(e){return e.innerHTML="<div class='hidden e'></div><div class='hidden'></div>",!e.getElementsByClassName||!e.getElementsByClassName("e").length?!1:(e.lastChild.className="e",e.getElementsByClassName("e").length===2)}),et=K(function(e){e.id=d+0,e.innerHTML="<a name='"+d+"'></a><div name='"+d+"'></div>",y.insertBefore(e,y.firstChild);var t=g.getElementsByName&&g.getElementsByName(d).length===2+g.getElementsByName(d+0).length;return r=!g.getElementById(d),y.removeChild(e),t});try{x.call(y.childNodes,0)[0].nodeType}catch(tt){x=function(e){var t,n=[];for(;t=this[e];e++)n.push(t);return n}}nt.matches=function(e,t){return nt(e,null,null,t)},nt.matchesSelector=function(e,t){return nt(t,null,null,[e]).length>0},s=nt.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(i===1||i===9||i===11){if(typeof e.textContent=="string")return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=s(e)}else if(i===3||i===4)return e.nodeValue}else for(;t=e[r];r++)n+=s(t);return n},o=nt.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?t.nodeName!=="HTML":!1},u=nt.contains=y.contains?function(e,t){var n=e.nodeType===9?e.documentElement:e,r=t&&t.parentNode;return e===r||!!(r&&r.nodeType===1&&n.contains&&n.contains(r))}:y.compareDocumentPosition?function(e,t){return t&&!!(e.compareDocumentPosition(t)&16)}:function(e,t){while(t=t.parentNode)if(t===e)return!0;return!1},nt.attr=function(e,t){var n,r=o(e);return r||(t=t.toLowerCase()),(n=i.attrHandle[t])?n(e):r||Y?e.getAttribute(t):(n=e.getAttributeNode(t),n?typeof e[t]=="boolean"?e[t]?t:null:n.specified?n.value:null:null)},i=nt.selectors={cacheLength:50,createPseudo:N,match:J,attrHandle:G?{}:{href:function(e){return e.getAttribute("href",2)},type:function(e){return e.getAttribute("type")}},find:{ID:r?function(e,t,n){if(typeof t.getElementById!==p&&!n){var r=t.getElementById(e);return r&&r.parentNode?[r]:[]}}:function(e,n,r){if(typeof n.getElementById!==p&&!r){var i=n.getElementById(e);return i?i.id===e||typeof i.getAttributeNode!==p&&i.getAttributeNode("id").value===e?[i]:t:[]}},TAG:Q?function(e,t){if(typeof t.getElementsByTagName!==p)return t.getElementsByTagName(e)}:function(e,t){var n=t.getElementsByTagName(e);if(e==="*"){var r,i=[],s=0;for(;r=n[s];s++)r.nodeType===1&&i.push(r);return i}return n},NAME:et&&function(e,t){if(typeof t.getElementsByName!==p)return t.getElementsByName(name)},CLASS:Z&&function(e,t,n){if(typeof t.getElementsByClassName!==p&&!n)return t.getElementsByClassName(e)}},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace($,""),e[3]=(e[4]||e[5]||"").replace($,""),e[2]==="~="&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return 
e[1]=e[1].toLowerCase(),e[1]==="nth"?(e[2]||nt.error(e[0]),e[3]=+(e[3]?e[4]+(e[5]||1):2*(e[2]==="even"||e[2]==="odd")),e[4]=+(e[6]+e[7]||e[2]==="odd")):e[2]&&nt.error(e[0]),e},PSEUDO:function(e){var t,n;if(J.CHILD.test(e[0]))return null;if(e[3])e[2]=e[3];else if(t=e[4])q.test(t)&&(n=ut(t,!0))&&(n=t.indexOf(")",t.length-n)-t.length)&&(t=t.slice(0,n),e[0]=e[0].slice(0,n)),e[2]=t;return e.slice(0,3)}},filter:{ID:r?function(e){return e=e.replace($,""),function(t){return t.getAttribute("id")===e}}:function(e){return e=e.replace($,""),function(t){var n=typeof t.getAttributeNode!==p&&t.getAttributeNode("id");return n&&n.value===e}},TAG:function(e){return e==="*"?function(){return!0}:(e=e.replace($,"").toLowerCase(),function(t){return t.nodeName&&t.nodeName.toLowerCase()===e})},CLASS:function(e){var t=k[d][e+" "];return t||(t=new RegExp("(^|"+O+")"+e+"("+O+"|$)"))&&k(e,function(e){return t.test(e.className||typeof e.getAttribute!==p&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r,i){var s=nt.attr(r,e);return s==null?t==="!=":t?(s+="",t==="="?s===n:t==="!="?s!==n:t==="^="?n&&s.indexOf(n)===0:t==="*="?n&&s.indexOf(n)>-1:t==="$="?n&&s.substr(s.length-n.length)===n:t==="~="?(" "+s+" ").indexOf(n)>-1:t==="|="?s===n||s.substr(0,n.length+1)===n+"-":!1):!0}},CHILD:function(e,t,n,r){return e==="nth"?function(e){var t,i,s=e.parentNode;if(n===1&&r===0)return!0;if(s){i=0;for(t=s.firstChild;t;t=t.nextSibling)if(t.nodeType===1){i++;if(e===t)break}}return i-=r,i===n||i%n===0&&i/n>=0}:function(t){var n=t;switch(e){case"only":case"first":while(n=n.previousSibling)if(n.nodeType===1)return!1;if(e==="first")return!0;n=t;case"last":while(n=n.nextSibling)if(n.nodeType===1)return!1;return!0}}},PSEUDO:function(e,t){var n,r=i.pseudos[e]||i.setFilters[e.toLowerCase()]||nt.error("unsupported pseudo: "+e);return r[d]?r(t):r.length>1?(n=[e,e,"",t],i.setFilters.hasOwnProperty(e.toLowerCase())?N(function(e,n){var i,s=r(e,t),o=s.length;while(o--)i=T.call(e,s[o]),e[i]=!(n[i]=s[o])}):function(e){return r(e,0,n)}):r}},pseudos:{not:N(function(e){var t=[],n=[],r=a(e.replace(j,"$1"));return r[d]?N(function(e,t,n,i){var s,o=r(e,null,i,[]),u=e.length;while(u--)if(s=o[u])e[u]=!(t[u]=s)}):function(e,i,s){return t[0]=e,r(t,null,s,n),!n.pop()}}),has:N(function(e){return function(t){return nt(e,t).length>0}}),contains:N(function(e){return function(t){return(t.textContent||t.innerText||s(t)).indexOf(e)>-1}}),enabled:function(e){return e.disabled===!1},disabled:function(e){return e.disabled===!0},checked:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&!!e.checked||t==="option"&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,e.selected===!0},parent:function(e){return!i.pseudos.empty(e)},empty:function(e){var t;e=e.firstChild;while(e){if(e.nodeName>"@"||(t=e.nodeType)===3||t===4)return!1;e=e.nextSibling}return!0},header:function(e){return X.test(e.nodeName)},text:function(e){var t,n;return e.nodeName.toLowerCase()==="input"&&(t=e.type)==="text"&&((n=e.getAttribute("type"))==null||n.toLowerCase()===t)},radio:rt("radio"),checkbox:rt("checkbox"),file:rt("file"),password:rt("password"),image:rt("image"),submit:it("submit"),reset:it("reset"),button:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&e.type==="button"||t==="button"},input:function(e){return V.test(e.nodeName)},focus:function(e){var t=e.ownerDocument;return e===t.activeElement&&(!t.hasFocus||t.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},active:function(e){return 
e===e.ownerDocument.activeElement},first:st(function(){return[0]}),last:st(function(e,t){return[t-1]}),eq:st(function(e,t,n){return[n<0?n+t:n]}),even:st(function(e,t){for(var n=0;n<t;n+=2)e.push(n);return e}),odd:st(function(e,t){for(var n=1;n<t;n+=2)e.push(n);return e}),lt:st(function(e,t,n){for(var r=n<0?n+t:n;--r>=0;)e.push(r);return e}),gt:st(function(e,t,n){for(var r=n<0?n+t:n;++r<t;)e.push(r);return e})}},f=y.compareDocumentPosition?function(e,t){return e===t?(l=!0,0):(!e.compareDocumentPosition||!t.compareDocumentPosition?e.compareDocumentPosition:e.compareDocumentPosition(t)&4)?-1:1}:function(e,t){if(e===t)return l=!0,0;if(e.sourceIndex&&t.sourceIndex)return e.sourceIndex-t.sourceIndex;var n,r,i=[],s=[],o=e.parentNode,u=t.parentNode,a=o;if(o===u)return ot(e,t);if(!o)return-1;if(!u)return 1;while(a)i.unshift(a),a=a.parentNode;a=u;while(a)s.unshift(a),a=a.parentNode;n=i.length,r=s.length;for(var f=0;f<n&&f<r;f++)if(i[f]!==s[f])return ot(i[f],s[f]);return f===n?ot(e,s[f],-1):ot(i[f],t,1)},[0,0].sort(f),h=!l,nt.uniqueSort=function(e){var t,n=[],r=1,i=0;l=h,e.sort(f);if(l){for(;t=e[r];r++)t===e[r-1]&&(i=n.push(r));while(i--)e.splice(n[i],1)}return e},nt.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},a=nt.compile=function(e,t){var n,r=[],i=[],s=A[d][e+" "];if(!s){t||(t=ut(e)),n=t.length;while(n--)s=ht(t[n]),s[d]?r.push(s):i.push(s);s=A(e,pt(i,r))}return s},g.querySelectorAll&&function(){var e,t=vt,n=/'|\\/g,r=/\=[\x20\t\r\n\f]*([^'"\]]*)[\x20\t\r\n\f]*\]/g,i=[":focus"],s=[":active"],u=y.matchesSelector||y.mozMatchesSelector||y.webkitMatchesSelector||y.oMatchesSelector||y.msMatchesSelector;K(function(e){e.innerHTML="<select><option selected=''></option></select>",e.querySelectorAll("[selected]").length||i.push("\\["+O+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),e.querySelectorAll(":checked").length||i.push(":checked")}),K(function(e){e.innerHTML="<p test=''></p>",e.querySelectorAll("[test^='']").length&&i.push("[*^$]="+O+"*(?:\"\"|'')"),e.innerHTML="<input type='hidden'/>",e.querySelectorAll(":enabled").length||i.push(":enabled",":disabled")}),i=new RegExp(i.join("|")),vt=function(e,r,s,o,u){if(!o&&!u&&!i.test(e)){var a,f,l=!0,c=d,h=r,p=r.nodeType===9&&e;if(r.nodeType===1&&r.nodeName.toLowerCase()!=="object"){a=ut(e),(l=r.getAttribute("id"))?c=l.replace(n,"\\$&"):r.setAttribute("id",c),c="[id='"+c+"'] ",f=a.length;while(f--)a[f]=c+a[f].join("");h=z.test(e)&&r.parentNode||r,p=a.join(",")}if(p)try{return S.apply(s,x.call(h.querySelectorAll(p),0)),s}catch(v){}finally{l||r.removeAttribute("id")}}return t(e,r,s,o,u)},u&&(K(function(t){e=u.call(t,"div");try{u.call(t,"[test!='']:sizzle"),s.push("!=",H)}catch(n){}}),s=new RegExp(s.join("|")),nt.matchesSelector=function(t,n){n=n.replace(r,"='$1']");if(!o(t)&&!s.test(n)&&!i.test(n))try{var a=u.call(t,n);if(a||e||t.document&&t.document.nodeType!==11)return a}catch(f){}return nt(n,null,null,[t]).length>0})}(),i.pseudos.nth=i.pseudos.eq,i.filters=mt.prototype=i.pseudos,i.setFilters=new mt,nt.attr=v.attr,v.find=nt,v.expr=nt.selectors,v.expr[":"]=v.expr.pseudos,v.unique=nt.uniqueSort,v.text=nt.getText,v.isXMLDoc=nt.isXML,v.contains=nt.contains}(e);var nt=/Until$/,rt=/^(?:parents|prev(?:Until|All))/,it=/^.[^:#\[\.,]*$/,st=v.expr.match.needsContext,ot={children:!0,contents:!0,next:!0,prev:!0};v.fn.extend({find:function(e){var t,n,r,i,s,o,u=this;if(typeof e!="string")return 
v(e).filter(function(){for(t=0,n=u.length;t<n;t++)if(v.contains(u[t],this))return!0});o=this.pushStack("","find",e);for(t=0,n=this.length;t<n;t++){r=o.length,v.find(e,this[t],o);if(t>0)for(i=r;i<o.length;i++)for(s=0;s<r;s++)if(o[s]===o[i]){o.splice(i--,1);break}}return o},has:function(e){var t,n=v(e,this),r=n.length;return this.filter(function(){for(t=0;t<r;t++)if(v.contains(this,n[t]))return!0})},not:function(e){return this.pushStack(ft(this,e,!1),"not",e)},filter:function(e){return this.pushStack(ft(this,e,!0),"filter",e)},is:function(e){return!!e&&(typeof e=="string"?st.test(e)?v(e,this.context).index(this[0])>=0:v.filter(e,this).length>0:this.filter(e).length>0)},closest:function(e,t){var n,r=0,i=this.length,s=[],o=st.test(e)||typeof e!="string"?v(e,t||this.context):0;for(;r<i;r++){n=this[r];while(n&&n.ownerDocument&&n!==t&&n.nodeType!==11){if(o?o.index(n)>-1:v.find.matchesSelector(n,e)){s.push(n);break}n=n.parentNode}}return s=s.length>1?v.unique(s):s,this.pushStack(s,"closest",e)},index:function(e){return e?typeof e=="string"?v.inArray(this[0],v(e)):v.inArray(e.jquery?e[0]:e,this):this[0]&&this[0].parentNode?this.prevAll().length:-1},add:function(e,t){var n=typeof e=="string"?v(e,t):v.makeArray(e&&e.nodeType?[e]:e),r=v.merge(this.get(),n);return this.pushStack(ut(n[0])||ut(r[0])?r:v.unique(r))},addBack:function(e){return this.add(e==null?this.prevObject:this.prevObject.filter(e))}}),v.fn.andSelf=v.fn.addBack,v.each({parent:function(e){var t=e.parentNode;return t&&t.nodeType!==11?t:null},parents:function(e){return v.dir(e,"parentNode")},parentsUntil:function(e,t,n){return v.dir(e,"parentNode",n)},next:function(e){return at(e,"nextSibling")},prev:function(e){return at(e,"previousSibling")},nextAll:function(e){return v.dir(e,"nextSibling")},prevAll:function(e){return v.dir(e,"previousSibling")},nextUntil:function(e,t,n){return v.dir(e,"nextSibling",n)},prevUntil:function(e,t,n){return v.dir(e,"previousSibling",n)},siblings:function(e){return v.sibling((e.parentNode||{}).firstChild,e)},children:function(e){return v.sibling(e.firstChild)},contents:function(e){return v.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:v.merge([],e.childNodes)}},function(e,t){v.fn[e]=function(n,r){var i=v.map(this,t,n);return nt.test(e)||(r=n),r&&typeof r=="string"&&(i=v.filter(r,i)),i=this.length>1&&!ot[e]?v.unique(i):i,this.length>1&&rt.test(e)&&(i=i.reverse()),this.pushStack(i,e,l.call(arguments).join(","))}}),v.extend({filter:function(e,t,n){return n&&(e=":not("+e+")"),t.length===1?v.find.matchesSelector(t[0],e)?[t[0]]:[]:v.find.matches(e,t)},dir:function(e,n,r){var i=[],s=e[n];while(s&&s.nodeType!==9&&(r===t||s.nodeType!==1||!v(s).is(r)))s.nodeType===1&&i.push(s),s=s[n];return i},sibling:function(e,t){var n=[];for(;e;e=e.nextSibling)e.nodeType===1&&e!==t&&n.push(e);return n}});var ct="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",ht=/ jQuery\d+="(?:null|\d+)"/g,pt=/^\s+/,dt=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,vt=/<([\w:]+)/,mt=/<tbody/i,gt=/<|&#?\w+;/,yt=/<(?:script|style|link)/i,bt=/<(?:script|object|embed|option|style)/i,wt=new RegExp("<(?:"+ct+")[\\s/>]","i"),Et=/^(?:checkbox|radio)$/,St=/checked\s*(?:[^=]|=\s*.checked.)/i,xt=/\/(java|ecma)script/i,Tt=/^\s*<!(?:\[CDATA\[|\-\-)|[\]\-]{2}>\s*$/g,Nt={option:[1,"<select 
multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]},Ct=lt(i),kt=Ct.appendChild(i.createElement("div"));Nt.optgroup=Nt.option,Nt.tbody=Nt.tfoot=Nt.colgroup=Nt.caption=Nt.thead,Nt.th=Nt.td,v.support.htmlSerialize||(Nt._default=[1,"X<div>","</div>"]),v.fn.extend({text:function(e){return v.access(this,function(e){return e===t?v.text(this):this.empty().append((this[0]&&this[0].ownerDocument||i).createTextNode(e))},null,e,arguments.length)},wrapAll:function(e){if(v.isFunction(e))return this.each(function(t){v(this).wrapAll(e.call(this,t))});if(this[0]){var t=v(e,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstChild&&e.firstChild.nodeType===1)e=e.firstChild;return e}).append(this)}return this},wrapInner:function(e){return v.isFunction(e)?this.each(function(t){v(this).wrapInner(e.call(this,t))}):this.each(function(){var t=v(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=v.isFunction(e);return this.each(function(n){v(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(){return this.parent().each(function(){v.nodeName(this,"body")||v(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.appendChild(e)})},prepend:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.insertBefore(e,this.firstChild)})},before:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(e,this),"before",this.selector)}},after:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this.nextSibling)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(this,e),"after",this.selector)}},remove:function(e,t){var n,r=0;for(;(n=this[r])!=null;r++)if(!e||v.filter(e,[n]).length)!t&&n.nodeType===1&&(v.cleanData(n.getElementsByTagName("*")),v.cleanData([n])),n.parentNode&&n.parentNode.removeChild(n);return this},empty:function(){var e,t=0;for(;(e=this[t])!=null;t++){e.nodeType===1&&v.cleanData(e.getElementsByTagName("*"));while(e.firstChild)e.removeChild(e.firstChild)}return this},clone:function(e,t){return e=e==null?!1:e,t=t==null?e:t,this.map(function(){return v.clone(this,e,t)})},html:function(e){return v.access(this,function(e){var n=this[0]||{},r=0,i=this.length;if(e===t)return n.nodeType===1?n.innerHTML.replace(ht,""):t;if(typeof e=="string"&&!yt.test(e)&&(v.support.htmlSerialize||!wt.test(e))&&(v.support.leadingWhitespace||!pt.test(e))&&!Nt[(vt.exec(e)||["",""])[1].toLowerCase()]){e=e.replace(dt,"<$1></$2>");try{for(;r<i;r++)n=this[r]||{},n.nodeType===1&&(v.cleanData(n.getElementsByTagName("*")),n.innerHTML=e);n=0}catch(s){}}n&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(e){return ut(this[0])?this.length?this.pushStack(v(v.isFunction(e)?e():e),"replaceWith",e):this:v.isFunction(e)?this.each(function(t){var n=v(this),r=n.html();n.replaceWith(e.call(this,t,r))}):(typeof e!="string"&&(e=v(e).detach()),this.each(function(){var 
t=this.nextSibling,n=this.parentNode;v(this).remove(),t?v(t).before(e):v(n).append(e)}))},detach:function(e){return this.remove(e,!0)},domManip:function(e,n,r){e=[].concat.apply([],e);var i,s,o,u,a=0,f=e[0],l=[],c=this.length;if(!v.support.checkClone&&c>1&&typeof f=="string"&&St.test(f))return this.each(function(){v(this).domManip(e,n,r)});if(v.isFunction(f))return this.each(function(i){var s=v(this);e[0]=f.call(this,i,n?s.html():t),s.domManip(e,n,r)});if(this[0]){i=v.buildFragment(e,this,l),o=i.fragment,s=o.firstChild,o.childNodes.length===1&&(o=s);if(s){n=n&&v.nodeName(s,"tr");for(u=i.cacheable||c-1;a<c;a++)r.call(n&&v.nodeName(this[a],"table")?Lt(this[a],"tbody"):this[a],a===u?o:v.clone(o,!0,!0))}o=s=null,l.length&&v.each(l,function(e,t){t.src?v.ajax?v.ajax({url:t.src,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0}):v.error("no ajax"):v.globalEval((t.text||t.textContent||t.innerHTML||"").replace(Tt,"")),t.parentNode&&t.parentNode.removeChild(t)})}return this}}),v.buildFragment=function(e,n,r){var s,o,u,a=e[0];return n=n||i,n=!n.nodeType&&n[0]||n,n=n.ownerDocument||n,e.length===1&&typeof a=="string"&&a.length<512&&n===i&&a.charAt(0)==="<"&&!bt.test(a)&&(v.support.checkClone||!St.test(a))&&(v.support.html5Clone||!wt.test(a))&&(o=!0,s=v.fragments[a],u=s!==t),s||(s=n.createDocumentFragment(),v.clean(e,n,s,r),o&&(v.fragments[a]=u&&s)),{fragment:s,cacheable:o}},v.fragments={},v.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,t){v.fn[e]=function(n){var r,i=0,s=[],o=v(n),u=o.length,a=this.length===1&&this[0].parentNode;if((a==null||a&&a.nodeType===11&&a.childNodes.length===1)&&u===1)return o[t](this[0]),this;for(;i<u;i++)r=(i>0?this.clone(!0):this).get(),v(o[i])[t](r),s=s.concat(r);return this.pushStack(s,e,o.selector)}}),v.extend({clone:function(e,t,n){var r,i,s,o;v.support.html5Clone||v.isXMLDoc(e)||!wt.test("<"+e.nodeName+">")?o=e.cloneNode(!0):(kt.innerHTML=e.outerHTML,kt.removeChild(o=kt.firstChild));if((!v.support.noCloneEvent||!v.support.noCloneChecked)&&(e.nodeType===1||e.nodeType===11)&&!v.isXMLDoc(e)){Ot(e,o),r=Mt(e),i=Mt(o);for(s=0;r[s];++s)i[s]&&Ot(r[s],i[s])}if(t){At(e,o);if(n){r=Mt(e),i=Mt(o);for(s=0;r[s];++s)At(r[s],i[s])}}return r=i=null,o},clean:function(e,t,n,r){var s,o,u,a,f,l,c,h,p,d,m,g,y=t===i&&Ct,b=[];if(!t||typeof t.createDocumentFragment=="undefined")t=i;for(s=0;(u=e[s])!=null;s++){typeof u=="number"&&(u+="");if(!u)continue;if(typeof u=="string")if(!gt.test(u))u=t.createTextNode(u);else{y=y||lt(t),c=t.createElement("div"),y.appendChild(c),u=u.replace(dt,"<$1></$2>"),a=(vt.exec(u)||["",""])[1].toLowerCase(),f=Nt[a]||Nt._default,l=f[0],c.innerHTML=f[1]+u+f[2];while(l--)c=c.lastChild;if(!v.support.tbody){h=mt.test(u),p=a==="table"&&!h?c.firstChild&&c.firstChild.childNodes:f[1]==="<table>"&&!h?c.childNodes:[];for(o=p.length-1;o>=0;--o)v.nodeName(p[o],"tbody")&&!p[o].childNodes.length&&p[o].parentNode.removeChild(p[o])}!v.support.leadingWhitespace&&pt.test(u)&&c.insertBefore(t.createTextNode(pt.exec(u)[0]),c.firstChild),u=c.childNodes,c.parentNode.removeChild(c)}u.nodeType?b.push(u):v.merge(b,u)}c&&(u=c=y=null);if(!v.support.appendChecked)for(s=0;(u=b[s])!=null;s++)v.nodeName(u,"input")?_t(u):typeof u.getElementsByTagName!="undefined"&&v.grep(u.getElementsByTagName("input"),_t);if(n){m=function(e){if(!e.type||xt.test(e.type))return r?r.push(e.parentNode?e.parentNode.removeChild(e):e):n.appendChild(e)};for(s=0;(u=b[s])!=null;s++)if(!v.nodeName(u,"script")||!m(u))n.appendChild(u),typeof 
u.getElementsByTagName!="undefined"&&(g=v.grep(v.merge([],u.getElementsByTagName("script")),m),b.splice.apply(b,[s+1,0].concat(g)),s+=g.length)}return b},cleanData:function(e,t){var n,r,i,s,o=0,u=v.expando,a=v.cache,f=v.support.deleteExpando,l=v.event.special;for(;(i=e[o])!=null;o++)if(t||v.acceptData(i)){r=i[u],n=r&&a[r];if(n){if(n.events)for(s in n.events)l[s]?v.event.remove(i,s):v.removeEvent(i,s,n.handle);a[r]&&(delete a[r],f?delete i[u]:i.removeAttribute?i.removeAttribute(u):i[u]=null,v.deletedIds.push(r))}}}}),function(){var e,t;v.uaMatch=function(e){e=e.toLowerCase();var t=/(chrome)[ \/]([\w.]+)/.exec(e)||/(webkit)[ \/]([\w.]+)/.exec(e)||/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(e)||/(msie) ([\w.]+)/.exec(e)||e.indexOf("compatible")<0&&/(mozilla)(?:.*? rv:([\w.]+)|)/.exec(e)||[];return{browser:t[1]||"",version:t[2]||"0"}},e=v.uaMatch(o.userAgent),t={},e.browser&&(t[e.browser]=!0,t.version=e.version),t.chrome?t.webkit=!0:t.webkit&&(t.safari=!0),v.browser=t,v.sub=function(){function e(t,n){return new e.fn.init(t,n)}v.extend(!0,e,this),e.superclass=this,e.fn=e.prototype=this(),e.fn.constructor=e,e.sub=this.sub,e.fn.init=function(r,i){return i&&i instanceof v&&!(i instanceof e)&&(i=e(i)),v.fn.init.call(this,r,i,t)},e.fn.init.prototype=e.fn;var t=e(i);return e}}();var Dt,Pt,Ht,Bt=/alpha\([^)]*\)/i,jt=/opacity=([^)]*)/,Ft=/^(top|right|bottom|left)$/,It=/^(none|table(?!-c[ea]).+)/,qt=/^margin/,Rt=new RegExp("^("+m+")(.*)$","i"),Ut=new RegExp("^("+m+")(?!px)[a-z%]+$","i"),zt=new RegExp("^([-+])=("+m+")","i"),Wt={BODY:"block"},Xt={position:"absolute",visibility:"hidden",display:"block"},Vt={letterSpacing:0,fontWeight:400},$t=["Top","Right","Bottom","Left"],Jt=["Webkit","O","Moz","ms"],Kt=v.fn.toggle;v.fn.extend({css:function(e,n){return v.access(this,function(e,n,r){return r!==t?v.style(e,n,r):v.css(e,n)},e,n,arguments.length>1)},show:function(){return Yt(this,!0)},hide:function(){return Yt(this)},toggle:function(e,t){var n=typeof e=="boolean";return v.isFunction(e)&&v.isFunction(t)?Kt.apply(this,arguments):this.each(function(){(n?e:Gt(this))?v(this).show():v(this).hide()})}}),v.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Dt(e,"opacity");return n===""?"1":n}}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":v.support.cssFloat?"cssFloat":"styleFloat"},style:function(e,n,r,i){if(!e||e.nodeType===3||e.nodeType===8||!e.style)return;var s,o,u,a=v.camelCase(n),f=e.style;n=v.cssProps[a]||(v.cssProps[a]=Qt(f,a)),u=v.cssHooks[n]||v.cssHooks[a];if(r===t)return u&&"get"in u&&(s=u.get(e,!1,i))!==t?s:f[n];o=typeof r,o==="string"&&(s=zt.exec(r))&&(r=(s[1]+1)*s[2]+parseFloat(v.css(e,n)),o="number");if(r==null||o==="number"&&isNaN(r))return;o==="number"&&!v.cssNumber[a]&&(r+="px");if(!u||!("set"in u)||(r=u.set(e,r,i))!==t)try{f[n]=r}catch(l){}},css:function(e,n,r,i){var s,o,u,a=v.camelCase(n);return n=v.cssProps[a]||(v.cssProps[a]=Qt(e.style,a)),u=v.cssHooks[n]||v.cssHooks[a],u&&"get"in u&&(s=u.get(e,!0,i)),s===t&&(s=Dt(e,n)),s==="normal"&&n in Vt&&(s=Vt[n]),r||i!==t?(o=parseFloat(s),r||v.isNumeric(o)?o||0:s):s},swap:function(e,t,n){var r,i,s={};for(i in t)s[i]=e.style[i],e.style[i]=t[i];r=n.call(e);for(i in t)e.style[i]=s[i];return r}}),e.getComputedStyle?Dt=function(t,n){var r,i,s,o,u=e.getComputedStyle(t,null),a=t.style;return 
u&&(r=u.getPropertyValue(n)||u[n],r===""&&!v.contains(t.ownerDocument,t)&&(r=v.style(t,n)),Ut.test(r)&&qt.test(n)&&(i=a.width,s=a.minWidth,o=a.maxWidth,a.minWidth=a.maxWidth=a.width=r,r=u.width,a.width=i,a.minWidth=s,a.maxWidth=o)),r}:i.documentElement.currentStyle&&(Dt=function(e,t){var n,r,i=e.currentStyle&&e.currentStyle[t],s=e.style;return i==null&&s&&s[t]&&(i=s[t]),Ut.test(i)&&!Ft.test(t)&&(n=s.left,r=e.runtimeStyle&&e.runtimeStyle.left,r&&(e.runtimeStyle.left=e.currentStyle.left),s.left=t==="fontSize"?"1em":i,i=s.pixelLeft+"px",s.left=n,r&&(e.runtimeStyle.left=r)),i===""?"auto":i}),v.each(["height","width"],function(e,t){v.cssHooks[t]={get:function(e,n,r){if(n)return e.offsetWidth===0&&It.test(Dt(e,"display"))?v.swap(e,Xt,function(){return tn(e,t,r)}):tn(e,t,r)},set:function(e,n,r){return Zt(e,n,r?en(e,t,r,v.support.boxSizing&&v.css(e,"boxSizing")==="border-box"):0)}}}),v.support.opacity||(v.cssHooks.opacity={get:function(e,t){return jt.test((t&&e.currentStyle?e.currentStyle.filter:e.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":t?"1":""},set:function(e,t){var n=e.style,r=e.currentStyle,i=v.isNumeric(t)?"alpha(opacity="+t*100+")":"",s=r&&r.filter||n.filter||"";n.zoom=1;if(t>=1&&v.trim(s.replace(Bt,""))===""&&n.removeAttribute){n.removeAttribute("filter");if(r&&!r.filter)return}n.filter=Bt.test(s)?s.replace(Bt,i):s+" "+i}}),v(function(){v.support.reliableMarginRight||(v.cssHooks.marginRight={get:function(e,t){return v.swap(e,{display:"inline-block"},function(){if(t)return Dt(e,"marginRight")})}}),!v.support.pixelPosition&&v.fn.position&&v.each(["top","left"],function(e,t){v.cssHooks[t]={get:function(e,n){if(n){var r=Dt(e,t);return Ut.test(r)?v(e).position()[t]+"px":r}}}})}),v.expr&&v.expr.filters&&(v.expr.filters.hidden=function(e){return e.offsetWidth===0&&e.offsetHeight===0||!v.support.reliableHiddenOffsets&&(e.style&&e.style.display||Dt(e,"display"))==="none"},v.expr.filters.visible=function(e){return!v.expr.filters.hidden(e)}),v.each({margin:"",padding:"",border:"Width"},function(e,t){v.cssHooks[e+t]={expand:function(n){var r,i=typeof n=="string"?n.split(" "):[n],s={};for(r=0;r<4;r++)s[e+$t[r]+t]=i[r]||i[r-2]||i[0];return s}},qt.test(e)||(v.cssHooks[e+t].set=Zt)});var rn=/%20/g,sn=/\[\]$/,on=/\r?\n/g,un=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,an=/^(?:select|textarea)/i;v.fn.extend({serialize:function(){return v.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?v.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||an.test(this.nodeName)||un.test(this.type))}).map(function(e,t){var n=v(this).val();return n==null?null:v.isArray(n)?v.map(n,function(e,n){return{name:t.name,value:e.replace(on,"\r\n")}}):{name:t.name,value:n.replace(on,"\r\n")}}).get()}}),v.param=function(e,n){var r,i=[],s=function(e,t){t=v.isFunction(t)?t():t==null?"":t,i[i.length]=encodeURIComponent(e)+"="+encodeURIComponent(t)};n===t&&(n=v.ajaxSettings&&v.ajaxSettings.traditional);if(v.isArray(e)||e.jquery&&!v.isPlainObject(e))v.each(e,function(){s(this.name,this.value)});else for(r in e)fn(r,e[r],n,s);return i.join("&").replace(rn,"+")};var ln,cn,hn=/#.*$/,pn=/^(.*?):[ 
\t]*([^\r\n]*)\r?$/mg,dn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,vn=/^(?:GET|HEAD)$/,mn=/^\/\//,gn=/\?/,yn=/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,bn=/([?&])_=[^&]*/,wn=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,En=v.fn.load,Sn={},xn={},Tn=["*/"]+["*"];try{cn=s.href}catch(Nn){cn=i.createElement("a"),cn.href="",cn=cn.href}ln=wn.exec(cn.toLowerCase())||[],v.fn.load=function(e,n,r){if(typeof e!="string"&&En)return En.apply(this,arguments);if(!this.length)return this;var i,s,o,u=this,a=e.indexOf(" ");return a>=0&&(i=e.slice(a,e.length),e=e.slice(0,a)),v.isFunction(n)?(r=n,n=t):n&&typeof n=="object"&&(s="POST"),v.ajax({url:e,type:s,dataType:"html",data:n,complete:function(e,t){r&&u.each(r,o||[e.responseText,t,e])}}).done(function(e){o=arguments,u.html(i?v("<div>").append(e.replace(yn,"")).find(i):e)}),this},v.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(e,t){v.fn[t]=function(e){return this.on(t,e)}}),v.each(["get","post"],function(e,n){v[n]=function(e,r,i,s){return v.isFunction(r)&&(s=s||i,i=r,r=t),v.ajax({type:n,url:e,data:r,success:i,dataType:s})}}),v.extend({getScript:function(e,n){return v.get(e,t,n,"script")},getJSON:function(e,t,n){return v.get(e,t,n,"json")},ajaxSetup:function(e,t){return t?Ln(e,v.ajaxSettings):(t=e,e=v.ajaxSettings),Ln(e,t),e},ajaxSettings:{url:cn,isLocal:dn.test(ln[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":Tn},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":e.String,"text html":!0,"text json":v.parseJSON,"text xml":v.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:Cn(Sn),ajaxTransport:Cn(xn),ajax:function(e,n){function T(e,n,s,a){var l,y,b,w,S,T=n;if(E===2)return;E=2,u&&clearTimeout(u),o=t,i=a||"",x.readyState=e>0?4:0,s&&(w=An(c,x,s));if(e>=200&&e<300||e===304)c.ifModified&&(S=x.getResponseHeader("Last-Modified"),S&&(v.lastModified[r]=S),S=x.getResponseHeader("Etag"),S&&(v.etag[r]=S)),e===304?(T="notmodified",l=!0):(l=On(c,w),T=l.state,y=l.data,b=l.error,l=!b);else{b=T;if(!T||e)T="error",e<0&&(e=0)}x.status=e,x.statusText=(n||T)+"",l?d.resolveWith(h,[y,T,x]):d.rejectWith(h,[x,T,b]),x.statusCode(g),g=t,f&&p.trigger("ajax"+(l?"Success":"Error"),[x,c,l?y:b]),m.fireWith(h,[x,T]),f&&(p.trigger("ajaxComplete",[x,c]),--v.active||v.event.trigger("ajaxStop"))}typeof e=="object"&&(n=e,e=t),n=n||{};var r,i,s,o,u,a,f,l,c=v.ajaxSetup({},n),h=c.context||c,p=h!==c&&(h.nodeType||h instanceof v)?v(h):v.event,d=v.Deferred(),m=v.Callbacks("once memory"),g=c.statusCode||{},b={},w={},E=0,S="canceled",x={readyState:0,setRequestHeader:function(e,t){if(!E){var n=e.toLowerCase();e=w[n]=w[n]||e,b[e]=t}return this},getAllResponseHeaders:function(){return E===2?i:null},getResponseHeader:function(e){var n;if(E===2){if(!s){s={};while(n=pn.exec(i))s[n[1].toLowerCase()]=n[2]}n=s[e.toLowerCase()]}return n===t?null:n},overrideMimeType:function(e){return E||(c.mimeType=e),this},abort:function(e){return e=e||S,o&&o.abort(e),T(0,e),this}};d.promise(x),x.success=x.done,x.error=x.fail,x.complete=m.add,x.statusCode=function(e){if(e){var t;if(E<2)for(t in e)g[t]=[g[t],e[t]];else t=e[x.status],x.always(t)}return 
this},c.url=((e||c.url)+"").replace(hn,"").replace(mn,ln[1]+"//"),c.dataTypes=v.trim(c.dataType||"*").toLowerCase().split(y),c.crossDomain==null&&(a=wn.exec(c.url.toLowerCase()),c.crossDomain=!(!a||a[1]===ln[1]&&a[2]===ln[2]&&(a[3]||(a[1]==="http:"?80:443))==(ln[3]||(ln[1]==="http:"?80:443)))),c.data&&c.processData&&typeof c.data!="string"&&(c.data=v.param(c.data,c.traditional)),kn(Sn,c,n,x);if(E===2)return x;f=c.global,c.type=c.type.toUpperCase(),c.hasContent=!vn.test(c.type),f&&v.active++===0&&v.event.trigger("ajaxStart");if(!c.hasContent){c.data&&(c.url+=(gn.test(c.url)?"&":"?")+c.data,delete c.data),r=c.url;if(c.cache===!1){var N=v.now(),C=c.url.replace(bn,"$1_="+N);c.url=C+(C===c.url?(gn.test(c.url)?"&":"?")+"_="+N:"")}}(c.data&&c.hasContent&&c.contentType!==!1||n.contentType)&&x.setRequestHeader("Content-Type",c.contentType),c.ifModified&&(r=r||c.url,v.lastModified[r]&&x.setRequestHeader("If-Modified-Since",v.lastModified[r]),v.etag[r]&&x.setRequestHeader("If-None-Match",v.etag[r])),x.setRequestHeader("Accept",c.dataTypes[0]&&c.accepts[c.dataTypes[0]]?c.accepts[c.dataTypes[0]]+(c.dataTypes[0]!=="*"?", "+Tn+"; q=0.01":""):c.accepts["*"]);for(l in c.headers)x.setRequestHeader(l,c.headers[l]);if(!c.beforeSend||c.beforeSend.call(h,x,c)!==!1&&E!==2){S="abort";for(l in{success:1,error:1,complete:1})x[l](c[l]);o=kn(xn,c,n,x);if(!o)T(-1,"No Transport");else{x.readyState=1,f&&p.trigger("ajaxSend",[x,c]),c.async&&c.timeout>0&&(u=setTimeout(function(){x.abort("timeout")},c.timeout));try{E=1,o.send(b,T)}catch(k){if(!(E<2))throw k;T(-1,k)}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var Mn=[],_n=/\?/,Dn=/(=)\?(?=&|$)|\?\?/,Pn=v.now();v.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Mn.pop()||v.expando+"_"+Pn++;return this[e]=!0,e}}),v.ajaxPrefilter("json jsonp",function(n,r,i){var s,o,u,a=n.data,f=n.url,l=n.jsonp!==!1,c=l&&Dn.test(f),h=l&&!c&&typeof a=="string"&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Dn.test(a);if(n.dataTypes[0]==="jsonp"||c||h)return s=n.jsonpCallback=v.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,o=e[s],c?n.url=f.replace(Dn,"$1"+s):h?n.data=a.replace(Dn,"$1"+s):l&&(n.url+=(_n.test(f)?"&":"?")+n.jsonp+"="+s),n.converters["script json"]=function(){return u||v.error(s+" was not called"),u[0]},n.dataTypes[0]="json",e[s]=function(){u=arguments},i.always(function(){e[s]=o,n[s]&&(n.jsonpCallback=r.jsonpCallback,Mn.push(s)),u&&v.isFunction(o)&&o(u[0]),u=o=t}),"script"}),v.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(e){return v.globalEval(e),e}}}),v.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),v.ajaxTransport("script",function(e){if(e.crossDomain){var n,r=i.head||i.getElementsByTagName("head")[0]||i.documentElement;return{send:function(s,o){n=i.createElement("script"),n.async="async",e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,i){if(i||!n.readyState||/loaded|complete/.test(n.readyState))n.onload=n.onreadystatechange=null,r&&n.parentNode&&r.removeChild(n),n=t,i||o(200,"success")},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(0,1)}}}});var Hn,Bn=e.ActiveXObject?function(){for(var e in 
Hn)Hn[e](0,1)}:!1,jn=0;v.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&Fn()||In()}:Fn,function(e){v.extend(v.support,{ajax:!!e,cors:!!e&&"withCredentials"in e})}(v.ajaxSettings.xhr()),v.support.ajax&&v.ajaxTransport(function(n){if(!n.crossDomain||v.support.cors){var r;return{send:function(i,s){var o,u,a=n.xhr();n.username?a.open(n.type,n.url,n.async,n.username,n.password):a.open(n.type,n.url,n.async);if(n.xhrFields)for(u in n.xhrFields)a[u]=n.xhrFields[u];n.mimeType&&a.overrideMimeType&&a.overrideMimeType(n.mimeType),!n.crossDomain&&!i["X-Requested-With"]&&(i["X-Requested-With"]="XMLHttpRequest");try{for(u in i)a.setRequestHeader(u,i[u])}catch(f){}a.send(n.hasContent&&n.data||null),r=function(e,i){var u,f,l,c,h;try{if(r&&(i||a.readyState===4)){r=t,o&&(a.onreadystatechange=v.noop,Bn&&delete Hn[o]);if(i)a.readyState!==4&&a.abort();else{u=a.status,l=a.getAllResponseHeaders(),c={},h=a.responseXML,h&&h.documentElement&&(c.xml=h);try{c.text=a.responseText}catch(p){}try{f=a.statusText}catch(p){f=""}!u&&n.isLocal&&!n.crossDomain?u=c.text?200:404:u===1223&&(u=204)}}}catch(d){i||s(-1,d)}c&&s(u,f,c,l)},n.async?a.readyState===4?setTimeout(r,0):(o=++jn,Bn&&(Hn||(Hn={},v(e).unload(Bn)),Hn[o]=r),a.onreadystatechange=r):r()},abort:function(){r&&r(0,1)}}}});var qn,Rn,Un=/^(?:toggle|show|hide)$/,zn=new RegExp("^(?:([-+])=|)("+m+")([a-z%]*)$","i"),Wn=/queueHooks$/,Xn=[Gn],Vn={"*":[function(e,t){var n,r,i=this.createTween(e,t),s=zn.exec(t),o=i.cur(),u=+o||0,a=1,f=20;if(s){n=+s[2],r=s[3]||(v.cssNumber[e]?"":"px");if(r!=="px"&&u){u=v.css(i.elem,e,!0)||n||1;do a=a||".5",u/=a,v.style(i.elem,e,u+r);while(a!==(a=i.cur()/o)&&a!==1&&--f)}i.unit=r,i.start=u,i.end=s[1]?u+(s[1]+1)*n:n}return i}]};v.Animation=v.extend(Kn,{tweener:function(e,t){v.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;r<i;r++)n=e[r],Vn[n]=Vn[n]||[],Vn[n].unshift(t)},prefilter:function(e,t){t?Xn.unshift(e):Xn.push(e)}}),v.Tween=Yn,Yn.prototype={constructor:Yn,init:function(e,t,n,r,i,s){this.elem=e,this.prop=n,this.easing=i||"swing",this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=s||(v.cssNumber[n]?"":"px")},cur:function(){var e=Yn.propHooks[this.prop];return e&&e.get?e.get(this):Yn.propHooks._default.get(this)},run:function(e){var t,n=Yn.propHooks[this.prop];return this.options.duration?this.pos=t=v.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):Yn.propHooks._default.set(this),this}},Yn.prototype.init.prototype=Yn.prototype,Yn.propHooks={_default:{get:function(e){var t;return e.elem[e.prop]==null||!!e.elem.style&&e.elem.style[e.prop]!=null?(t=v.css(e.elem,e.prop,!1,""),!t||t==="auto"?0:t):e.elem[e.prop]},set:function(e){v.fx.step[e.prop]?v.fx.step[e.prop](e):e.elem.style&&(e.elem.style[v.cssProps[e.prop]]!=null||v.cssHooks[e.prop])?v.style(e.elem,e.prop,e.now+e.unit):e.elem[e.prop]=e.now}}},Yn.propHooks.scrollTop=Yn.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},v.each(["toggle","show","hide"],function(e,t){var n=v.fn[t];v.fn[t]=function(r,i,s){return r==null||typeof r=="boolean"||!e&&v.isFunction(r)&&v.isFunction(i)?n.apply(this,arguments):this.animate(Zn(t,!0),r,i,s)}}),v.fn.extend({fadeTo:function(e,t,n,r){return this.filter(Gt).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(e,t,n,r){var 
i=v.isEmptyObject(e),s=v.speed(t,n,r),o=function(){var t=Kn(this,v.extend({},e),s);i&&t.stop(!0)};return i||s.queue===!1?this.each(o):this.queue(s.queue,o)},stop:function(e,n,r){var i=function(e){var t=e.stop;delete e.stop,t(r)};return typeof e!="string"&&(r=n,n=e,e=t),n&&e!==!1&&this.queue(e||"fx",[]),this.each(function(){var t=!0,n=e!=null&&e+"queueHooks",s=v.timers,o=v._data(this);if(n)o[n]&&o[n].stop&&i(o[n]);else for(n in o)o[n]&&o[n].stop&&Wn.test(n)&&i(o[n]);for(n=s.length;n--;)s[n].elem===this&&(e==null||s[n].queue===e)&&(s[n].anim.stop(r),t=!1,s.splice(n,1));(t||!r)&&v.dequeue(this,e)})}}),v.each({slideDown:Zn("show"),slideUp:Zn("hide"),slideToggle:Zn("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,t){v.fn[e]=function(e,n,r){return this.animate(t,e,n,r)}}),v.speed=function(e,t,n){var r=e&&typeof e=="object"?v.extend({},e):{complete:n||!n&&t||v.isFunction(e)&&e,duration:e,easing:n&&t||t&&!v.isFunction(t)&&t};r.duration=v.fx.off?0:typeof r.duration=="number"?r.duration:r.duration in v.fx.speeds?v.fx.speeds[r.duration]:v.fx.speeds._default;if(r.queue==null||r.queue===!0)r.queue="fx";return r.old=r.complete,r.complete=function(){v.isFunction(r.old)&&r.old.call(this),r.queue&&v.dequeue(this,r.queue)},r},v.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2}},v.timers=[],v.fx=Yn.prototype.init,v.fx.tick=function(){var e,n=v.timers,r=0;qn=v.now();for(;r<n.length;r++)e=n[r],!e()&&n[r]===e&&n.splice(r--,1);n.length||v.fx.stop(),qn=t},v.fx.timer=function(e){e()&&v.timers.push(e)&&!Rn&&(Rn=setInterval(v.fx.tick,v.fx.interval))},v.fx.interval=13,v.fx.stop=function(){clearInterval(Rn),Rn=null},v.fx.speeds={slow:600,fast:200,_default:400},v.fx.step={},v.expr&&v.expr.filters&&(v.expr.filters.animated=function(e){return v.grep(v.timers,function(t){return e===t.elem}).length});var er=/^(?:body|html)$/i;v.fn.offset=function(e){if(arguments.length)return e===t?this:this.each(function(t){v.offset.setOffset(this,e,t)});var n,r,i,s,o,u,a,f={top:0,left:0},l=this[0],c=l&&l.ownerDocument;if(!c)return;return(r=c.body)===l?v.offset.bodyOffset(l):(n=c.documentElement,v.contains(n,l)?(typeof l.getBoundingClientRect!="undefined"&&(f=l.getBoundingClientRect()),i=tr(c),s=n.clientTop||r.clientTop||0,o=n.clientLeft||r.clientLeft||0,u=i.pageYOffset||n.scrollTop,a=i.pageXOffset||n.scrollLeft,{top:f.top+u-s,left:f.left+a-o}):f)},v.offset={bodyOffset:function(e){var t=e.offsetTop,n=e.offsetLeft;return v.support.doesNotIncludeMarginInBodyOffset&&(t+=parseFloat(v.css(e,"marginTop"))||0,n+=parseFloat(v.css(e,"marginLeft"))||0),{top:t,left:n}},setOffset:function(e,t,n){var r=v.css(e,"position");r==="static"&&(e.style.position="relative");var i=v(e),s=i.offset(),o=v.css(e,"top"),u=v.css(e,"left"),a=(r==="absolute"||r==="fixed")&&v.inArray("auto",[o,u])>-1,f={},l={},c,h;a?(l=i.position(),c=l.top,h=l.left):(c=parseFloat(o)||0,h=parseFloat(u)||0),v.isFunction(t)&&(t=t.call(e,n,s)),t.top!=null&&(f.top=t.top-s.top+c),t.left!=null&&(f.left=t.left-s.left+h),"using"in t?t.using.call(e,f):i.css(f)}},v.fn.extend({position:function(){if(!this[0])return;var e=this[0],t=this.offsetParent(),n=this.offset(),r=er.test(t[0].nodeName)?{top:0,left:0}:t.offset();return n.top-=parseFloat(v.css(e,"marginTop"))||0,n.left-=parseFloat(v.css(e,"marginLeft"))||0,r.top+=parseFloat(v.css(t[0],"borderTopWidth"))||0,r.left+=parseFloat(v.css(t[0],"borderLeftWidth"))||0,{top:n.top-r.top,left:n.left-r.left}},offsetParent:function(){return this.map(function(){var 
e=this.offsetParent||i.body;while(e&&!er.test(e.nodeName)&&v.css(e,"position")==="static")e=e.offsetParent;return e||i.body})}}),v.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);v.fn[e]=function(i){return v.access(this,function(e,i,s){var o=tr(e);if(s===t)return o?n in o?o[n]:o.document.documentElement[i]:e[i];o?o.scrollTo(r?v(o).scrollLeft():s,r?s:v(o).scrollTop()):e[i]=s},e,i,arguments.length,null)}}),v.each({Height:"height",Width:"width"},function(e,n){v.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){v.fn[i]=function(i,s){var o=arguments.length&&(r||typeof i!="boolean"),u=r||(i===!0||s===!0?"margin":"border");return v.access(this,function(n,r,i){var s;return v.isWindow(n)?n.document.documentElement["client"+e]:n.nodeType===9?(s=n.documentElement,Math.max(n.body["scroll"+e],s["scroll"+e],n.body["offset"+e],s["offset"+e],s["client"+e])):i===t?v.css(n,r,i,u):v.style(n,r,i,u)},n,o?i:t,o,null)}})}),e.jQuery=e.$=v,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return v})})(window);
\ No newline at end of file diff --git a/rpki/gui/app/templates/404.html b/rpki/gui/app/templates/404.html new file mode 100644 index 00000000..76ef3aee --- /dev/null +++ b/rpki/gui/app/templates/404.html @@ -0,0 +1,11 @@ +{% extends "base.html" %} + +{% block content %} +<div class="page-header"> + <h1>Page Not Found</h1> +</div> + +<div class="alert alert-error"> + <strong>Whoops!</strong> I could not find the page you requested. +</div> +{% endblock content %} diff --git a/rpki/gui/app/templates/500.html b/rpki/gui/app/templates/500.html new file mode 100644 index 00000000..216fe8ae --- /dev/null +++ b/rpki/gui/app/templates/500.html @@ -0,0 +1,11 @@ +{% extends "base.html" %} + +{% block content %} +<div class="page-header"> + <h1>Internal Server Error</h1> +</div> + +<div class="alert alert-error"> + <strong>Whoops!</strong> The administrator has been notified of this error. +</div> +{% endblock content %} diff --git a/rpki/gui/app/templates/app/alert_confirm_clear.html b/rpki/gui/app/templates/app/alert_confirm_clear.html new file mode 100644 index 00000000..5d7fcf04 --- /dev/null +++ b/rpki/gui/app/templates/app/alert_confirm_clear.html @@ -0,0 +1,21 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class='page-header'> + <h1>Delete all alerts</h1> +</div> + +<div class="row-fluid"> + <div class="alert"> + Please confirm that you would like to delete all alerts. + </div> + <form method="POST"> + {% csrf_token %} + <div class="form-actions"> + <button class="btn btn-danger" type="submit"><i class="icon-trash"></i> Delete All</button> + <a class="btn" href="{% url "alert-list" %}">Cancel</a> + </div> + </form> +</div> +{% endblock %} diff --git a/rpki/gui/app/templates/app/alert_confirm_delete.html b/rpki/gui/app/templates/app/alert_confirm_delete.html new file mode 100644 index 00000000..78c84917 --- /dev/null +++ b/rpki/gui/app/templates/app/alert_confirm_delete.html @@ -0,0 +1,17 @@ +{% extends "app/alert_detail.html" %} +{% load url from future %} + +{% block action %} +<div class="row-fluid"> + <div class="alert"> + Please confirm that you would like to delete this alert. 
+ </div> + <form method="POST"> + {% csrf_token %} + <div class="form-actions"> + <button class="btn btn-danger" type="submit"><i class="icon-trash"></i> Delete</button> + <a class="btn" href="{{ object.get_absolute_url }}">Cancel</a> + </div> + </form> +</div> +{% endblock action %} diff --git a/rpki/gui/app/templates/app/alert_detail.html b/rpki/gui/app/templates/app/alert_detail.html new file mode 100644 index 00000000..b3a73b7e --- /dev/null +++ b/rpki/gui/app/templates/app/alert_detail.html @@ -0,0 +1,31 @@ +{% extends "app/app_base.html" %} +{% load url from future %} +{% load app_extras %} + +{% block content %} +<div class="page-header"> + <h1>Alert Detail <small>{{ object.subject }}</small></h1> +</div> + +<div class="row-fluid"> +<table class="table"> + <tr> + <th>Date:</th><td> {{ object.when }}</td> + </tr> + <tr> + <th>Severity:</th><td><span class="label {% severity_class object.severity %}">{{ object.get_severity_display }}</span></td> + </tr> +</table> + +<p> +{{ object.text }} + +</div> + +{% block action %} +<div class="row-fluid"> +<a class="btn btn-danger" title="delete this alert" href="{% url "alert-delete" object.pk %}"><i class="icon-trash"></i> Delete</a> +</div> +{% endblock action %} + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/alert_list.html b/rpki/gui/app/templates/app/alert_list.html new file mode 100644 index 00000000..dd0530e4 --- /dev/null +++ b/rpki/gui/app/templates/app/alert_list.html @@ -0,0 +1,31 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class="page-header"> + <h1>Alerts</h1> +</div> + +<table class="table table-striped"> + <thead> + <tr> + <th>#</th> + <th>Date</th> + <th>Subject</th> + </tr> + </thead> + <tbody> + {% for obj in object_list %} + <tr {% if not obj.seen %}style="font-weight: bold" {% endif %}class="{% if obj.severity == 1 %}warning{% endif %} {% if obj.severity == 2 %}error{% endif %}"> + <td>{# <input type="checkbox"> #}</td> + <td>{{ obj.when }}</td> + <td><a href="{{ obj.get_absolute_url }}" title="view text of alert">{{ obj.subject }}</a></td> + </tr> + {% endfor %} + </tbody> +</table> + +<div class='row-fluid'> + <a class="btn btn-danger" href="{% url 'alert-clear-all' %}"><i class='icon-trash'></i> Delete All</a> +</div> +{% endblock content %} diff --git a/rpki/gui/app/templates/app/app_base.html b/rpki/gui/app/templates/app/app_base.html new file mode 100644 index 00000000..4fb5f731 --- /dev/null +++ b/rpki/gui/app/templates/app/app_base.html @@ -0,0 +1,31 @@ +{% extends "base.html" %} +{# this can be removed when django 1.4 is EOL, because it is the default behavior in 1.5 #} +{% load url from future %} +{% load app_extras %} + +{# This template defines the common structure for the rpki.gui.app application. 
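Pages in the portal extend this template with {% extends "app/app_base.html" %} and fill in the content block defined in base.html; the sidebar block below supplies the common navigation, and individual pages may append entries to it through the sidebar_extra block (see dashboard.html and routes_view.html).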
#} + +{% block sidebar %} + +<h2>{{ request.session.handle }}</h2> + +{# common navigation #} + +<ul class='nav nav-list'> + {% if request.session.handle %} + <li><a href="{% url "rpki.gui.app.views.dashboard" %}">dashboard</a></li> + <li><a href="{% url "rpki.gui.app.views.route_view" %}">routes</a></li> + <li><a href="{% url "alert-list" %}">alerts {% alert_count request.session.handle %}</a></li> + <li class="divider"></li> + {% endif %} + <li><a href="{% url "rpki.gui.app.views.conf_list" %}" title="select a different resource handle to manage">select identity</a></li> +{% if request.user.is_superuser %} + <li class="divider"></li> + <li><a href="{% url "rpki.gui.app.views.user_list" %}" title="manage users"><i class="icon-user"></i> web users</a></li> + <li><a href="{% url "rpki.gui.app.views.resource_holder_list" %}" title="manage resource holders"><i class="icon-user"></i> resource holders</a></li> + <li><a href="{% url "rpki.gui.app.views.client_list" %}" title="manage repository clients">repository clients</a></li> +{% endif %} +{% block sidebar_extra %}{% endblock %} +</ul> + +{% endblock sidebar %} diff --git a/rpki/gui/app/templates/app/app_confirm_delete.html b/rpki/gui/app/templates/app/app_confirm_delete.html new file mode 100644 index 00000000..7c35a733 --- /dev/null +++ b/rpki/gui/app/templates/app/app_confirm_delete.html @@ -0,0 +1,21 @@ +{% extends "app/app_base.html" %} + +{% block content %} +<div class='page-title'> + <h1>{{ form_title }}</h1> +</div> + +<div class='alert alert-block'> + <h4>Warning!</h4> + <strong>Please confirm</strong> that you would like to delete this object. +</div> + +<form method='POST' action=""> + {% csrf_token %} + {{ form }} + <div class="form-actions"> + <input class='btn btn-danger' value='Delete' type='submit'> + <a class='btn' href="{{ cancel_url }}">Cancel</a> + </div> +</form> +{% endblock content %} diff --git a/rpki/gui/app/templates/app/app_form.html b/rpki/gui/app/templates/app/app_form.html new file mode 100644 index 00000000..b6ab60a2 --- /dev/null +++ b/rpki/gui/app/templates/app/app_form.html @@ -0,0 +1,19 @@ +{% extends "app/app_base.html" %} + +{% block content %} +<div class="page-header"> + <h1>{{ form_title }}</h1> +</div> + +{# allow this template to be subclassed to fill in extra information, such as warnings #} +{% block form_info %}{% endblock form_info %} + +<form method="POST" action="" enctype="multipart/form-data" class="form-horizontal"> + {% csrf_token %} + {% include "app/bootstrap_form.html" %} + <div class="form-actions"> + <input class='btn btn-primary' type='submit' value='Save'> + <a class='btn' href="{{ cancel_url }}">Cancel</a> + </div> +</form> +{% endblock %} diff --git a/rpki/gui/app/templates/app/bootstrap_form.html b/rpki/gui/app/templates/app/bootstrap_form.html new file mode 100644 index 00000000..c6fd5424 --- /dev/null +++ b/rpki/gui/app/templates/app/bootstrap_form.html @@ -0,0 +1,26 @@ +{% if form.non_field_errors %} +<div class="alert alert-block alert-error"> + {{ form.non_field_errors }} +</div> +{% endif %} + +{% for field in form %} + +{% if field.is_hidden %} +{{ field }} +{% else %} +<div class="control-group {% if field.errors %}error{% endif %}"> + <label class="control-label" for="{{ field.html_name }}">{{ field.label }}</label> + <div class="controls"> + {{ field }} + {% if field.help_text %} + <span class="help-inline">{{ field.help_text }}</span> + {% endif %} + {% if field.errors %} + <span class="help-inline">{{ field.errors }}</span> + {% endif %} + </div> +</div> +{% endif %} 
+ +{% endfor %} diff --git a/rpki/gui/app/templates/app/child_detail.html b/rpki/gui/app/templates/app/child_detail.html new file mode 100644 index 00000000..8178e179 --- /dev/null +++ b/rpki/gui/app/templates/app/child_detail.html @@ -0,0 +1,48 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class="page-header"> + <h1>Child: {{ object.handle }}</h1> +</div> + +<div class='row-fluid'> + <p><strong>Valid until</strong> {{ object.valid_until }} +</div> + +<div class='row-fluid'> + <div class='span6'> + <strong>Addresses</strong> + {% if object.address_ranges.all %} + <ul class='unstyled'> + {% for a in object.address_ranges.all %} + <li>{{ a.as_resource_range }}</li> + {% endfor %} + </ul> + {% else %} + <p style='font-style:italic'>none</p> + {% endif %} + </div> + <div class='span6'> + <strong>ASNs</strong> + {% if object.asns.all %} + <ul class='unstyled'> + {% for a in object.asns.all %} + <li>{{ a.as_resource_range }}</li> + {% endfor %} + </ul> + {% else %} + <p style='font-style:italic'>none</p> + {% endif %} + </div> +</div> + +{% block action %} +<a class='btn' href="{% url "rpki.gui.app.views.child_edit" object.pk %}" title='Edit this child'><i class="icon-edit"></i> Edit</a> +<a class='btn' href="{% url "rpki.gui.app.views.child_add_asn" object.pk %}" title='Delegate an ASN to this child'><i class="icon-plus-sign"></i> AS</a> +<a class='btn' href="{% url "rpki.gui.app.views.child_add_prefix" object.pk %}" title='Delegate a prefix to this child'><i class="icon-plus-sign"></i> Prefix</a> +<a class='btn' href="{% url "rpki.gui.app.views.child_response" object.pk %}" title='Download XML file to send to child'><i class="icon-download"></i> Export</a> +<a class="btn" href="{% url "rpki.gui.app.views.child_delete" object.pk %}" title="Delete this child"><i class="icon-trash"></i> Delete</a> +{% endblock %} + +{% endblock %} diff --git a/rpki/gui/app/templates/app/client_detail.html b/rpki/gui/app/templates/app/client_detail.html new file mode 100644 index 00000000..3117e859 --- /dev/null +++ b/rpki/gui/app/templates/app/client_detail.html @@ -0,0 +1,25 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class="page-header"> + <h1>Repository Client: {{ object.handle }}</h1> +</div> + +<table class="table"> + <tr> + <td>Name</td> + <td>{{ object.handle }} </td> + </tr> + <tr> + <td>SIA</td> + <td>{{ object.sia_base }}</td> + </tr> +</table> + +{% block action %} +<a class="btn" href="{% url "client-export" object.pk %}" title="Download XML response to send to publication client"><i class="icon-download"></i> Export</a> +<a class="btn" href="{% url "rpki.gui.app.views.client_delete" object.pk %}"><i class="icon-trash"></i> Delete</a> +{% endblock action %} + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/client_list.html b/rpki/gui/app/templates/app/client_list.html new file mode 100644 index 00000000..12987c53 --- /dev/null +++ b/rpki/gui/app/templates/app/client_list.html @@ -0,0 +1,22 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class="page-header"> + <h1>Repository Clients</h1> +</div> +<table class="table table-striped"> + <thead><tr><th>Handle</th><th>Action</th></tr></thead> + <tbody> + {% for client in object_list %} + <tr> + <td><a href="{% url "rpki.gui.app.views.client_detail" client.pk %}">{{ client.handle }}</a></td> + <td> + <a class="btn btn-mini" href="{% url "rpki.gui.app.views.client_delete" client.pk %}" 
title="Delete"><i class="icon-trash"></i></a> + </td> + </tr> + {% endfor %} + </tbody> +</table> +<a class="btn" href="{% url "rpki.gui.app.views.client_import" %}"><i class="icon-upload"></i> Import</a> +{% endblock content %} diff --git a/rpki/gui/app/templates/app/conf_empty.html b/rpki/gui/app/templates/app/conf_empty.html new file mode 100644 index 00000000..efe06f14 --- /dev/null +++ b/rpki/gui/app/templates/app/conf_empty.html @@ -0,0 +1,17 @@ +{% extends "base.html" %} +{% load url from future %} + +{% block content %} + +{% if request.user.is_superuser %} +<div class="alert alert-info"> +There are currently no resource holders on this system. +</div> +<a class="btn" href="{% url "rpki.gui.app.views.resource_holder_create" %}" title="create a new resource holder"><i class="icon-plus-sign"></i> Create</a> +{% else %} +<div class="alert alert-error"> +Your account does not have permission to manage any resource handles on this server. Please contact your portal-gui adminstrator. +</div> +{% endif %} + +{% endblock %} diff --git a/rpki/gui/app/templates/app/conf_list.html b/rpki/gui/app/templates/app/conf_list.html new file mode 100644 index 00000000..dce6d59e --- /dev/null +++ b/rpki/gui/app/templates/app/conf_list.html @@ -0,0 +1,17 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class="page-header"> + <h1>Handle List</h1> +</div> + +<p>Please select a handle.</p> + +<ul> + {% for c in conf_list %} + <li><a href="{% url "rpki.gui.app.views.conf_select" %}?handle={{ c.handle }}&next={{ next_url }}">{{ c.handle }}</a></li> + {% endfor %} +</ul> + +{% endblock %} diff --git a/rpki/gui/app/templates/app/dashboard.html b/rpki/gui/app/templates/app/dashboard.html new file mode 100644 index 00000000..65dbb90f --- /dev/null +++ b/rpki/gui/app/templates/app/dashboard.html @@ -0,0 +1,230 @@ +{% extends "app/app_base.html" %} + +{# this can be removed when django 1.4 is EOL, because it is the default behavior in 1.5 #} +{% load url from future %} + +{% block sidebar_extra %} + <li class="divider"></li> + <li><a href="{% url "rpki.gui.app.views.conf_export" %}" title="download XML identity to send to parent"> + {#<i class="icon-download"></i> #}export identity</a></li> +{% endblock sidebar_extra %} + +{% block content %} +<div class='row-fluid'> + <div class='span6'> + <div class="page-header"> + <h1>Resources</h1> + </div> + + <table class='table table-condensed-table table-striped'> + <tr> + <th>Resource</th> + <th>Valid Until</th> + <th>Parent</th> + </tr> + + {% for object in asns %} + <tr> + <td>{{ object }}</td> + <td>{{ object.cert.not_after }}</td> + <td> + {% if object.cert.parent %} + <a href="{{ object.cert.parent.get_absolute_url }}">{{ object.cert.parent.handle }}</a> + {% endif %} + </td> + </tr> + {% endfor %} + + {% for object in prefixes %} + <tr> + <td>{{ object.as_resource_range }}</td> + <td>{{ object.cert.not_after }}</td> + <td> + {% if object.cert.parent %} + <a href="{{ object.cert.parent.get_absolute_url }}">{{ object.cert.parent.handle }}</a> + {% endif %} + </td> + </tr> + {% endfor %} + + {% if prefixes_v6 %} + {% for object in prefixes_v6 %} + <tr> + <td>{{ object.as_resource_range }}</td> + <td>{{ object.cert.not_after }}</td> + <td> + {% if object.cert.parent %} + <a href="{{ object.cert.parent.get_absolute_url }}">{{ object.cert.parent.handle }}</a> + {% endif %} + </td> + </tr> + {% endfor %} + {% endif %} + </table> + <a class='btn' href="{% url "rpki.gui.app.views.refresh" %}" title="refresh resource list from 
rpkid"><i class="icon-refresh"></i> refresh</a></li> + </div> + <div class='span6'> + <h2>Unallocated Resources</h2> + <p>The following resources have not been allocated to a child, nor appear in a ROA. + + {% if unused_asns %} + <h3>ASNs</h3> + <ul> + {% for asn in unused_asns %} + <li>AS{{ asn }} + {% endfor %} <!-- ASNs --> + </ul> + {% endif %} + + {% if unused_prefixes %} + <h3>IPv4</h3> + <table class="table table-condensed table-striped"> + <tr><th>Prefix</th><th>Action</th></tr> + {% for addr in unused_prefixes %} + <tr> + <td>{{ addr }}</td> + <td> + <a class="btn btn-mini" title="Create ROA using this prefix" href="{% url "rpki.gui.app.views.roa_create_multi" %}?roa={{ addr }}"><i class="icon-plus-sign"></i> ROA</a> + </td> + </tr> + {% endfor %} <!-- addrs --> + </table> + {% endif %} + + {% if unused_prefixes_v6 %} + <h3>IPv6</h3> + <table class="table table-condensed table-striped"> + <tr><th>Prefix</th><th></th></tr> + {% for addr in unused_prefixes_v6 %} + <tr> + <td>{{ addr }}</td> + <td> + <a class="btn btn-mini" title='create roa using this prefix' href="{% url "rpki.gui.app.views.roa_create_multi" %}?roa={{ addr }}"><i class="icon-plus-sign"></i> ROA</a> + </td> + </tr> + {% endfor %} <!-- addrs --> + </table> + {% endif %} + + </div><!-- /span --> +</div><!-- /row --> + +<div class="row-fluid"> + <div class="span6"> +<div class="page-header"> + <h1>ROAs</h1> +</div> +<table class="table table-condensed table-striped"> + <tr><th>Prefix</th><th>Max Length</th><th>AS</th><th></th></tr> + {% for roa in conf.roas %} + <tr> + <!-- each roa request has a single roa request prefix object associated --> + <td>{{ roa.prefixes.all.0.as_roa_prefix }}</td> + <td>{{ roa.prefixes.all.0.max_prefixlen }}</td> + <td>{{ roa.asn }}</td> + <td> + <a class="btn btn-mini" href="{% url "rpki.gui.app.views.roa_detail" roa.pk %}" title="Detail"><i class="icon-info-sign"></i></a> + <a class="btn btn-mini" href="{% url "rpki.gui.app.views.roa_delete" roa.pk %}" title="Delete"><i class="icon-trash"></i></a> + <a class="btn btn-mini" href="{% url "roa-clone" roa.pk %}" title="create another ROA for this prefix"><i class="icon-repeat"></i></a> + </td> + </tr> + {% endfor %} +</table> +<a class="btn" href="{% url "rpki.gui.app.views.roa_create_multi" %}"><i class="icon-plus-sign"></i> Create</a> +<a class="btn" href="{% url "roa-import" %}" title="import a CSV file containing ROAs"><i class="icon-upload"></i> Import</a> +<a class="btn" href="{% url "roa-export" %}" title="download a CSV file containing ROAs"><i class="icon-download"></i> Export</a> +</div> + + <div class="span6"> +<div class="page-header"> + <h1>Ghostbusters</h1> +</div> +<table class="table table-condensed table-striped"> + <tr><th>Full Name</th><th>Organization</th><th>Email</th><th>Telephone</th><th></th></tr> + {% for gbr in conf.ghostbusters %} + <tr> + <td>{{ gbr.full_name }}</td> + <td>{{ gbr.organization }}</td> + <td>{{ gbr.email_address }}</td> + <td>{{ gbr.telephone }}</td> + <td> + <a class="btn btn-mini" href="{% url "gbr-detail" gbr.pk %}" title="View"><i class="icon-info-sign"></i></a> + <a class="btn btn-mini" href="{% url "gbr-edit" gbr.pk %}" title="Edit"><i class="icon-edit"></i></a> + <a class="btn btn-mini" href="{% url "gbr-delete" gbr.pk %}" title="Delete"><i class="icon-trash"></i></a> + </td> + </tr> + {% endfor %} +</table> +<a class="btn" href="{% url "gbr-create" %}"><i class="icon-plus-sign"></i> Create</a> +</div><!-- /span --> +</div><!-- /row --> + +<div class="row-fluid"> + <div class="span6"> + <div 
class="page-header"> + <h1>Children</h1> + </div> +<table class="table table-condensed table-striped"> + <tr><th>Handle</th><th></th> + {% for child in conf.children %} + <tr> + <td><a href="{{ child.get_absolute_url }}">{{ child.handle }}</a></td> + <td> + <a class="btn btn-mini" href="{% url "rpki.gui.app.views.child_delete" child.pk %}" title="Delete"><i class="icon-trash"></i></a> + </td> + </tr> + {% endfor %} + </table> + <div class="row-fluid"> + <div class='span6'> + <a class="btn" href="{% url "rpki.gui.app.views.child_import" %}" title="Import XML request from Child"><i class="icon-upload"></i> Child</a> + <a class="btn" href="{% url "import-asns" %}" title="Import CSV file containing ASN delgations to children"><i class="icon-upload"></i> ASNs</a> + <a class="btn" href="{% url "import-prefixes" %}" title="import CSV file containing prefix delgations to children"><i class="icon-upload"></i> Prefixes</a> + </div> + </div> + <div class="row-fluid"> + <div class='span6'> + <a class="btn" href="{% url "export-asns" %}" title="Export CSV file containing ASN delgations to children"><i class="icon-download"></i> ASNs</a> + <a class="btn" href="{% url "export-prefixes" %}" title="Export CSV file containing prefix delgations to children"><i class="icon-download"></i> Prefixes</a> + </div> + </div> + </div><!-- /span --> + <div class="span6"> + <div class="page-header"> + <h1>Parents</h1> + </div> + <table class="table table-condensed table-striped"> + <tr><th>Handle</th><th></th></tr> + {% for parent in conf.parents %} + <tr> + <td><a href="{{ parent.get_absolute_url }}">{{ parent.handle }}</a></td> + <td> + <a class="btn btn-mini" href="{% url "rpki.gui.app.views.parent_delete" parent.pk %}" title="Delete"><i class="icon-trash"></i></a> + </td> + </tr> + {% endfor %} + </table> + <a class="btn" href="{% url "rpki.gui.app.views.parent_import" %}"><i class="icon-upload"></i> Import</a> + </div><!-- /span --> +</div><!-- /row --> + +<div class="row-fluid"> + <div class="span6"> + <div class="page-header"> + <h1>Repositories</h1> + </div> +<table class="table table-condensed table-striped"> + <tr><th>Handle</th><th></th></tr> + {% for repo in conf.repositories %} + <tr> + <td><a href="{{ repo.get_absolute_url }}">{{ repo.handle }}</a></td> + <td> + <a class="btn btn-mini" href="{% url "rpki.gui.app.views.repository_delete" repo.pk %}" title="Delete"><i class="icon-trash"></i></a> + </td> + </tr> + {% endfor %} + </table> + <a class="btn" href="{% url "rpki.gui.app.views.repository_import" %}"><i class="icon-upload"></i> Import</a> + </div><!-- /span --> +</div><!-- /row --> +{% endblock %} diff --git a/rpki/gui/app/templates/app/ghostbuster_confirm_delete.html b/rpki/gui/app/templates/app/ghostbuster_confirm_delete.html new file mode 100644 index 00000000..76b1d25a --- /dev/null +++ b/rpki/gui/app/templates/app/ghostbuster_confirm_delete.html @@ -0,0 +1,20 @@ +{% extends "app/ghostbuster_detail.html" %} + +{% block extra %} + +<div class='alert-message block-message warning'> + <p> + <strong>Please confirm</strong> that you really want to delete by clicking Delete. 
+ + <div class='alert-actions'> + <form method='POST' action='{{ request.get_full_path }}'> + {% csrf_token %} + <input class='btn danger' type='submit' value='Delete' /> + <a class='btn' href='{{ object.get_absolute_url }}'>Cancel</a> + </form> + </div> +</div> + +{% endblock %} + +<!-- vim:set sw=2: --> diff --git a/rpki/gui/app/templates/app/ghostbusterrequest_detail.html b/rpki/gui/app/templates/app/ghostbusterrequest_detail.html new file mode 100644 index 00000000..296f0f16 --- /dev/null +++ b/rpki/gui/app/templates/app/ghostbusterrequest_detail.html @@ -0,0 +1,64 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class="page-header"> + <h1>Ghostbuster Request</h1> +</div> + +<table class='table table-striped table-condensed'> + <tr><td>Full Name</td><td>{{ object.full_name }}</td></tr> + + {% if object.honorific_prefix %} + <tr><td>Honorific Prefix</td><td>{{ object.honorific_prefix }}</td></tr> + {% endif %} + + {% if object.organization %} + <tr><td>Organization</td><td>{{ object.organization }}</td></tr> + {% endif %} + + {% if object.telephone %} + <tr><td>Telephone</td><td>{{ object.telephone }}</td></tr> + {% endif %} + + {% if object.email_address %} + <tr><td>Email</td><td>{{ object.email_address }}</td></tr> + {% endif %} + + {% if object.box %} + <tr><td>P.O. Box</td><td>{{ object.box }}</td></tr> + {% endif %} + + {% if object.extended %} + <tr><td>Extended Address</td><td>{{ object.extended }}</td></tr> + {% endif %} + + {% if object.street %} + <tr><td>Street Address</td><td>{{ object.street }}</td></tr> + {% endif %} + + {% if object.city %} + <tr><td>City</td><td>{{ object.city }}</td></tr> + {% endif %} + + {% if object.region %} + <tr><td>Region</td><td>{{ object.region }}</td></tr> + {% endif %} + + {% if object.code %} + <tr><td>Postal Code</td><td>{{ object.code }}</td></tr> + {% endif %} + + {% if object.country %} + <tr><td>Country</td><td>{{ object.country }}</td></tr> + {% endif %} + +</table> + +{% block action %} +{# the roarequest_confirm_delete template will override this section #} +<a class="btn" href="{% url "gbr-edit" object.pk %}"><i class="icon-edit"></i> Edit</a> +<a class="btn" href="{% url "gbr-delete" object.pk %}"><i class="icon-trash"></i> Delete</a> +{% endblock action %} + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/import_resource_form.html b/rpki/gui/app/templates/app/import_resource_form.html new file mode 100644 index 00000000..e446d344 --- /dev/null +++ b/rpki/gui/app/templates/app/import_resource_form.html @@ -0,0 +1,9 @@ +{% extends "app/app_form.html" %} + +{% block form_info %} +<div class="alert alert-block alert-warning"> + <b>Warning!</b> All existing resources of this type currently in the + database <b>will be deleted</b> and replaced with the contents of the CSV + file you are uploading. +</div> +{% endblock form_info %} diff --git a/rpki/gui/app/templates/app/object_confirm_delete.html b/rpki/gui/app/templates/app/object_confirm_delete.html new file mode 100644 index 00000000..c4af9b26 --- /dev/null +++ b/rpki/gui/app/templates/app/object_confirm_delete.html @@ -0,0 +1,21 @@ +{% extends parent_template %} +{% comment %} +Since Django templates do not support multiple inheritance, we simulate it by +dynamically extending from the *_detail.html template for a concrete object +type. The *DeleteView classes should set a "parent_template" variable which is a +string specifying the concrete template to inherit from. 
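+
+For illustration only, here is a hypothetical sketch (not the actual code in
+views.py; the class, model, and URL names are illustrative) of how a delete
+view could supply parent_template:
+
+    from django.core.urlresolvers import reverse_lazy
+    from django.views.generic import DeleteView
+    from rpki.gui.app import models
+
+    class GhostbusterDeleteView(DeleteView):  # hypothetical example
+        model = models.GhostbusterRequest
+        template_name = 'app/object_confirm_delete.html'
+        success_url = reverse_lazy('rpki.gui.app.views.dashboard')
+
+        def get_context_data(self, **kwargs):
+            context = super(GhostbusterDeleteView, self).get_context_data(**kwargs)
+            # point this generic template at the concrete *_detail.html template
+            context['parent_template'] = 'app/ghostbusterrequest_detail.html'
+            return context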
+{% endcomment %} +{% load url from future %} + +{% block action %} +<div class="alert alert-warning alert-block"> + <h4>Warning!</h4> + Please confirm that you would like to delete this object +</div> + +<form action='' method='POST'> + {% csrf_token %} + <input class='btn btn-danger' type='submit' value='Delete'> + <a class='btn' href="{% url "rpki.gui.app.views.dashboard" %}">Cancel</a> +</form> +{% endblock %} diff --git a/rpki/gui/app/templates/app/parent_detail.html b/rpki/gui/app/templates/app/parent_detail.html new file mode 100644 index 00000000..4dd1842f --- /dev/null +++ b/rpki/gui/app/templates/app/parent_detail.html @@ -0,0 +1,67 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class="page-header"> + <h1>Parent: {{ object.handle }}</h1> +</div> + +<table class="table table-striped table-condensed"> + <tr> + <td>service_uri</td> + <td>{{ object.service_uri }}</td> + </tr> + <tr> + <td>parent_handle</td> + <td>{{ object.parent_handle }}</td> + </tr> + <tr> + <td>child_handle</td> + <td>{{ object.child_handle }}</td> + </tr> + <tr> + <td>repository_type</td> + <td>{{ object.repository_type }}</td> + </tr> + <tr> + <td>referrer</td> + <td>{{ object.referrer }}</td> + </tr> + <tr> + <td>ta validity period</td> + <td>{{ object.ta.getNotBefore }} - {{ object.ta.getNotAfter }}</td> + </tr> +</table> + +<div class='row-fluid'> + <div class='span6'> + <h3>Delegated Addresses</h3> + <ul class='unstyled'> + {% for c in object.certs.all %} + {% for a in c.address_ranges.all %} + <li>{{ a }}</li> + {% endfor %} + {% for a in c.address_ranges_v6.all %} + <li>{{ a }}</li> + {% endfor %} + {% endfor %} + </ul> + </div> + <div class='span6'> + <h3>Delegated ASNs</h3> + <ul class='unstyled'> + {% for c in object.certs.all %} + {% for a in c.asn_ranges.all %} + <li>{{ a }}</li> + {% endfor %} + {% endfor %} + </ul> + </div> +</div> + +{% block action %} +<a class='btn' href='{% url "rpki.gui.app.views.parent_export" object.pk %}' title='Download XML to send to repository operator'><i class="icon-download"></i> Export</a> +<a class="btn" href="{% url "rpki.gui.app.views.parent_delete" object.pk %}" title="Delete this parent"><i class="icon-trash"></i> Delete</a> +{% endblock action %} + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/pubclient_list.html b/rpki/gui/app/templates/app/pubclient_list.html new file mode 100644 index 00000000..1872e005 --- /dev/null +++ b/rpki/gui/app/templates/app/pubclient_list.html @@ -0,0 +1,10 @@ +{% extends "app/object_list.html" %} +{% load url from future %} + +{% block actions %} +<div class='actions'> + <a class='btn' href='{% url "rpki.gui.app.views.pubclient_import" %}'>Import</a> +</div> +{% endblock actions %} + +<!-- vim:set sw=2: --> diff --git a/rpki/gui/app/templates/app/repository_detail.html b/rpki/gui/app/templates/app/repository_detail.html new file mode 100644 index 00000000..92a43e54 --- /dev/null +++ b/rpki/gui/app/templates/app/repository_detail.html @@ -0,0 +1,19 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class="page-header"> + <h1>Repository: {{ object.handle }}</h1> +</div> + +<table class="table"> + <tr> + <td><strong>SIA</strong></td> + <td>{{ object.sia_base }}</td> + </tr> +</table> + +{% block action %} +<a class="btn" href="{% url "rpki.gui.app.views.repository_delete" object.pk %}" title="Delete this repository"><i class="icon-trash"></i> Delete</a> +{% endblock action %} +{% endblock content %} diff --git 
a/rpki/gui/app/templates/app/resource_holder_list.html b/rpki/gui/app/templates/app/resource_holder_list.html new file mode 100644 index 00000000..6525e74d --- /dev/null +++ b/rpki/gui/app/templates/app/resource_holder_list.html @@ -0,0 +1,37 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class='page-header'> + <h1>Resource Holders</h1> +</div> + +<p> +This page lists all of the resource holders that are currently managed by this server. +Note that this is distinct from the +<a href="{% url "rpki.gui.app.views.user_list" %}">list of web interface users</a>. +</p> + +<table class='table table-striped'> + <thead> + <tr> + <th>Handle</th> + <th>Action</th> + </tr> + </thead> + <tbody> + {% for conf in object_list %} + <tr> + <td>{{ conf.handle }}</td> + <td> + <a class='btn btn-small' href='{% url "rpki.gui.app.views.resource_holder_edit" conf.pk %}' title="Edit"><i class="icon-edit"></i></a> + <a class='btn btn-small' href='{% url "rpki.gui.app.views.resource_holder_delete" conf.pk %}' title="Delete"><i class="icon-trash"></i></a> + </td> + </tr> + {% endfor %} + </tbody> +</table> + +<a class="btn" href="{% url "rpki.gui.app.views.resource_holder_create" %}"><i class="icon-plus-sign"></i> Create</a> +{% endblock content %} +{# vim: set ft=htmldjango: #} diff --git a/rpki/gui/app/templates/app/roa_detail.html b/rpki/gui/app/templates/app/roa_detail.html new file mode 100644 index 00000000..ec76579d --- /dev/null +++ b/rpki/gui/app/templates/app/roa_detail.html @@ -0,0 +1,40 @@ +{% extends "app/app_base.html" %} +{% load url from future %} +{% load app_extras %} + +{% block content %} +<div class="page-header"> + <h1>ROA Detail</h1> +</div> + +<div class="row-fluid"> + <div class="span6 well"> + <table class="table"> + <tr><th>Prefix</th><th>Max Length</th><th>AS</th></tr> + <tr> + <td>{{ object.prefixes.all.0.as_roa_prefix }}</td> + <td>{{ object.prefixes.all.0.max_prefixlen }}</td> + <td>{{ object.asn }}</td> + </tr> + </table> + </div> + + <div class="span6"> + <h3>Covered Routes</h3> + <p>This table lists currently announced routes which are covered by prefixes included in this ROA. + <table class="table"> + <tr><th>Prefix</th><th>AS</th><th>Validity</th></tr> + {% for r in object.routes %} + <tr> + <td>{{ r.as_resource_range }}</td> + <td>{{ r.asn }}</td> + <td>{% validity_label r.status %}</td> + <td><a href="{{ r.get_absolute_url }}" title="view route detail"><i class="icon-info-sign"></i></a></td> + </tr> + {% endfor %} + </table> + </div> +</div> + +<a class="btn" href="{% url "rpki.gui.app.views.roa_delete" object.pk %}"><i class="icon-trash"></i> Delete</a> +{% endblock content %} diff --git a/rpki/gui/app/templates/app/roarequest_confirm_delete.html b/rpki/gui/app/templates/app/roarequest_confirm_delete.html new file mode 100644 index 00000000..7dc3ec2b --- /dev/null +++ b/rpki/gui/app/templates/app/roarequest_confirm_delete.html @@ -0,0 +1,59 @@ +{% extends "app/app_base.html" %} +{% load url from future %} +{% load app_extras %} + +{% block content %} +<div class='page-header'> + <h1>Delete ROA Request</h1> +</div> + +<div class='row-fluid'> + <div class='span6'> + <div class='alert alert-block alert-warning'> + <p><strong>Please confirm</strong> that you would like to delete the + following ROA Request. The table to the right indicates how validation + status for matching routes may change. 
+ </div> + + <table class='table'> + <tr> + <th>Prefix</th> + <td>{{ object.prefixes.all.0.as_roa_prefix }}</td> + </tr> + <tr> + <th>Max Length</th> + <td>{{ object.prefixes.all.0.max_prefixlen }}</td> + </tr> + <tr> + <th>AS</th> + <td>{{ object.asn }}</td> + </tr> + </table> + + <form method='POST' action='{{ request.get_full_path }}'> + {% csrf_token %} + <input class='btn btn-danger' type='submit' value='Delete'/> + <a class='btn' href="{% url "rpki.gui.app.views.dashboard" %}">Cancel</a> + </form> + </div> + + <div class='span6'> + <h2>Matching Routes</h2> + + <table class='table table-striped table-condensed'> + <tr> + <th>Prefix</th> + <th>Origin AS</th> + <th>Validation Status</th> + </tr> + {% for r in routes %} + <tr> + <td>{{ r.get_prefix_display }}</td> + <td>{{ r.asn }}</td> + <td>{% validity_label r.newstatus %}</td> + </tr> + {% endfor %} + </table> + </div><!-- /span8 --> +</div><!-- /row --> +{% endblock content %} diff --git a/rpki/gui/app/templates/app/roarequest_confirm_form.html b/rpki/gui/app/templates/app/roarequest_confirm_form.html new file mode 100644 index 00000000..446bb6a4 --- /dev/null +++ b/rpki/gui/app/templates/app/roarequest_confirm_form.html @@ -0,0 +1,60 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class='page-title'> + <h1>Confirm ROA Request</h1> +</div> + +<div class='row-fluid'> + <div class='span6'> + <div class='alert alert-block-message alert-warning'> + <p><strong>Please confirm</strong> that you would like to create the following ROA. + The accompanying table indicates how the validation status may change as a result. + </div> + + <table class='table table-condensed table-striped'> + <tr> + <th>AS</th> + <th>Prefix</th> + <th>Max Length</th> + </tr> + <tr> + <td>{{ asn }}</td> + <td>{{ prefix }}</td> + <td>{{ max_prefixlen }}</td> + </tr> + </table> + + <form method='POST' action='{% url "rpki.gui.app.views.roa_create_confirm" %}'> + {% csrf_token %} + {% include "app/bootstrap_form.html" %} + + <div class='form-actions'> + <input class='btn btn-primary' type='submit' value='Create'/> + <a class='btn' href='{% url "rpki.gui.app.views.dashboard" %}'>Cancel</a> + </div> + </form> + </div> + + <div class='span6'> + <h2>Matched Routes</h2> + + <table class='table table-striped table-condensed'> + <tr> + <th>Prefix</th> + <th>Origin AS</th> + <th>Validation Status</th> + </tr> + {% for r in routes %} + <tr> + <td>{{ r.get_prefix_display }}</td> + <td>{{ r.asn }}</td> + <td><span class='label {{ r.status_label }}'>{{ r.status }}</span></td> + </tr> + {% endfor %} + </table> + </div> + +</div> +{% endblock content %} diff --git a/rpki/gui/app/templates/app/roarequest_confirm_multi_form.html b/rpki/gui/app/templates/app/roarequest_confirm_multi_form.html new file mode 100644 index 00000000..4a06a4aa --- /dev/null +++ b/rpki/gui/app/templates/app/roarequest_confirm_multi_form.html @@ -0,0 +1,66 @@ +{% extends "app/app_base.html" %} +{% load url from future %} +{% load app_extras %} + +{% block content %} +<div class='page-title'> + <h1>Confirm ROA Requests</h1> +</div> + +<div class='row-fluid'> + <div class='span6'> + <div class='alert alert-block-message alert-warning'> + <p><strong>Please confirm</strong> that you would like to create the following ROA(s). + The accompanying table indicates how the validation status may change as a result. 
+ </div> + + <table class='table table-condensed table-striped'> + <tr> + <th>Prefix</th> + <th>Max Length</th> + <th>AS</th> + </tr> + {% for roa in roas %} + <tr> + <td>{{ roa.prefix }}</td> + <td>{{ roa.max_prefixlen }}</td> + <td>{{ roa.asn }}</td> + </tr> + {% endfor %} + </table> + + <form method='POST' action='{% url "rpki.gui.app.views.roa_create_multi_confirm" %}'> + {% csrf_token %} + {{ formset.management_form }} + {% for form in formset %} + {% include "app/bootstrap_form.html" %} + {% endfor %} + + <div class='form-actions'> + <input class='btn btn-primary' type='submit' value='Create'/> + <a class='btn' href='{% url "rpki.gui.app.views.dashboard" %}'>Cancel</a> + </div> + </form> + </div> + + <div class='span6'> + <h2>Matched Routes</h2> + + <table class='table table-striped table-condensed'> + <tr> + <th>Prefix</th> + <th>Origin AS</th> + <th>Validation Status</th> + </tr> + {% for r in routes %} + <tr> + <td>{{ r.get_prefix_display }}</td> + <td>{{ r.asn }}</td> + <td>{% validity_label r.newstatus %}</td> + </tr> + {% endfor %} + </table> + </div> + +</div> +{% endblock content %} diff --git a/rpki/gui/app/templates/app/roarequest_form.html b/rpki/gui/app/templates/app/roarequest_form.html new file mode 100644 index 00000000..3a29131d --- /dev/null +++ b/rpki/gui/app/templates/app/roarequest_form.html @@ -0,0 +1,50 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{# This form is used for creating a new ROA request #} + +{% block content %} +<div class='page-title'> + <h1>Create ROA</h1> +</div> + +<script src='{{ STATIC_URL }}js/jquery-1.8.3.min.js'></script> +<script type='text/javascript'> + var f = function(){ + var e = $("#route_table") + e.empty() + e.append('<tr><th>Prefix</th><th>AS</th></tr>') + $.getJSON('/api/v1/route/', {'prefix__in':$(this).val()}, function(data){ + if (data.length == 1) { + $("#id_asn").val(data[0].asn) + } + for (var x in data) { + e.append('<tr><td>' + data[x].prefix + '</td><td>' + data[x].asn + '</td></tr>') + } + }) + } + + $(document).ready(function(){ $("#id_prefix").change(f) }) +</script> + +<div class='row-fluid'> + <div class='span6'> + <form method='POST' action='{{ request.get_full_path }}'> + {% csrf_token %} + {% include "app/bootstrap_form.html" %} + <div class="form-actions"> + <input class="btn" type="submit" value="Preview"> + <a class="btn" href="{% url "rpki.gui.app.views.dashboard" %}">Cancel</a> + </div> + </form> + </div> + + <div class='span6'> + Routes matching your prefix: + <table class='table table-condensed table-striped' id='route_table'> + <tr><th>Prefix</th><th>AS</th></tr> + <!-- script above populates this table based upon prefix matches --> + </table> + </div> +</div><!--row--> +{% endblock content %} diff --git a/rpki/gui/app/templates/app/roarequest_multi_form.html b/rpki/gui/app/templates/app/roarequest_multi_form.html new file mode 100644 index 00000000..06d07943 --- /dev/null +++ b/rpki/gui/app/templates/app/roarequest_multi_form.html @@ -0,0 +1,28 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class='page-title'> + <h1>Create ROAs</h1> +</div> + +<form method='POST' action='{{ request.get_full_path }}'> + {% csrf_token %} + {{ formset.management_form }} + {% for form in formset %} + <div class="controls controls-row"> + {{ form.prefix }} + {{ form.max_prefixlen }} + {{ form.asn }} + <label class="checkbox inline span1">{{ form.DELETE }} Delete</label> + {% if form.errors %}<span class="help-inline">{{ form.errors }}</span>{% endif %} + 
{% if form.non_field_errors %}<span class="help-inline">{{ form.non_field_errors }}</span>{% endif %} + </div> + {% endfor %} + + <div class="form-actions"> + <input class="btn" type="submit" value="Preview"> + <a class="btn" href="{% url "rpki.gui.app.views.dashboard" %}">Cancel</a> + </div> +</form> +{% endblock %} diff --git a/rpki/gui/app/templates/app/route_detail.html b/rpki/gui/app/templates/app/route_detail.html new file mode 100644 index 00000000..84add4a8 --- /dev/null +++ b/rpki/gui/app/templates/app/route_detail.html @@ -0,0 +1,58 @@ +{% extends "app/app_base.html" %} +{% load app_extras %} +{% load bootstrap_pager %} + +{# template for displaying the list of ROAs covering a specific route #} + +{% block content %} +<div class="page-header"> + <h1>Route Detail</h1> +</div> + +<div class="row-fluid"> + <div class="span12 well"> + <table class="table table-striped table-condensed"> + <thead> + <tr><th>Prefix</th><th>AS</th><th>Validity</th></tr> + </thead> + <tbody> + <tr> + <td>{{ object.as_resource_range }}</td> + <td>{{ object.asn }}</td> + <td>{% validity_label object.status %}</td> + </tr> + </tbody> + </table> + </div> +</div> + +<div class="row-fluid"> + <div class="span12"> + <p>The table below lists all ROAs which cover the route described above. + + <table class="table table-striped table-condensed"> + <thead> + <tr> + <th>Prefix</th> + <th>Max Length</th> + <th>ASN</th> + <th>Expires</th> + <th>URI</th> + </tr> + </thead> + <tbody> + {% for pfx in roa_prefixes %} + <tr> + <td>{{ pfx.as_resource_range }}</td> + <td>{{ pfx.max_length }}</td> + <td>{{ pfx.roas.all.0.asid }}</td> + <td>{{ pfx.roas.all.0.not_after }}</td> + <td>{{ pfx.roas.all.0.repo.uri }}</td> + </tr> + {% endfor %} + </tbody> + </table> + {% bootstrap_pager request roa_prefixes %} + </div> +</div> +{% endblock %} diff --git a/rpki/gui/app/templates/app/routes_view.html b/rpki/gui/app/templates/app/routes_view.html new file mode 100644 index 00000000..885f3fa9 --- /dev/null +++ b/rpki/gui/app/templates/app/routes_view.html @@ -0,0 +1,55 @@ +{% extends "app/app_base.html" %} +{% load url from future %} +{% load bootstrap_pager %} +{% load app_extras %} + +{% block sidebar_extra %} +<li class="nav-header">BGP data updated</li> +<li>IPv4: {{ timestamp.bgp_v4_import.isoformat }}</li> +<li>IPv6: {{ timestamp.bgp_v6_import.isoformat }}</li> +<li class="nav-header">rcynic cache updated</li> +<li>{{ timestamp.rcynic_import.isoformat }}</li> +{% endblock sidebar_extra %} + +{% block content %} + +<div class='page-header'> + <h1>Route View</h1> +</div> + +<p> +This view shows currently advertised routes for the prefixes listed in resource certs received from RPKI parents. 
+ +<form method="POST" action="{% url "suggest-roas" %}"> + {% csrf_token %} + <table class='table table-striped table-condensed'> + <thead> + <tr> + <th></th> + <th>Prefix</th> + <th>Origin AS</th> + <th>Validation Status</th> + </tr> + </thead> + <tbody> + {% for r in routes %} + <tr> + <td><input type="checkbox" name="pk-{{ r.pk }}"></td> + <td>{{ r.get_prefix_display }}</td> + <td>{{ r.asn }}</td> + <td> + {% validity_label r.status %} + <a href='{% url "rpki.gui.app.views.route_detail" r.pk %}' title='display ROAs covering this prefix'><i class="icon-info-sign"></i></a> + </td> + </tr> + {% endfor %} + </tbody> + </table> + <div class="form-actions"> + <button type="submit" title="create ROAs for selected routes"><i class="icon-plus-sign"></i> Create ROAs</button> + </div> +</form> + +{% bootstrap_pager request routes %} + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/user_list.html b/rpki/gui/app/templates/app/user_list.html new file mode 100644 index 00000000..1b419ded --- /dev/null +++ b/rpki/gui/app/templates/app/user_list.html @@ -0,0 +1,37 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +<div class='page-header'> + <h1>Users</h1> +</div> + +<p> +This page lists all user accounts in the web interface. Note that this is distinct from the +<a href="{% url "rpki.gui.app.views.resource_holder_list" %}">list of resource holders</a>. +</p> + +<table class='table table-striped'> + <thead> + <tr> + <th>Username</th> + <th>Email</th> + <th></th> + </tr> + </thead> + <tbody> + {% for user in object_list %} + <tr> + <td>{{ user.username }}</td> + <td>{{ user.email }}</td> + <td> + <a class='btn btn-small' href='{% url "rpki.gui.app.views.user_edit" user.pk %}' title="Edit"><i class="icon-edit"></i></a> + <a class='btn btn-small' href='{% url "rpki.gui.app.views.user_delete" user.pk %}' title="Delete"><i class="icon-trash"></i></a> + </td> + </tr> + {% endfor %} + </tbody> +</table> + +<a class='btn' href="{% url "rpki.gui.app.views.user_create" %}" title="create a new locally hosted resource handle"><i class="icon-plus-sign"></i> Create</a> +{% endblock content %} diff --git a/rpki/gui/app/templates/base.html b/rpki/gui/app/templates/base.html new file mode 100644 index 00000000..08d0c112 --- /dev/null +++ b/rpki/gui/app/templates/base.html @@ -0,0 +1,63 @@ +{% load url from future %} +{% load app_extras %} + +<!DOCTYPE HTML> +<html lang="en"> + <head> + <meta name='Content-Type' content='text/html; charset=UTF-8'> + <title>{% block title %}RPKI {% if request.session.handle %}: {{ request.session.handle }}{% endif %}{% endblock %}</title> + {% block head %}{% endblock %} + <link rel="stylesheet" href="{{ STATIC_URL }}css/bootstrap.min.css" media="screen"> + <link rel="icon" href="{{ STATIC_URL }}img/sui-riu.ico" type="image/x-icon"> + <link rel="shortcut icon" href="{{ STATIC_URL }}img/sui-riu.ico" type="image/x-icon"> + <style type="text/css"> + body { padding: 40px } + </style> + </head> + <body> + + <!-- TOP BAR --> + <div class="container"> + <div class="navbar navbar-inverse navbar-fixed-top"> + <div class="navbar-inner"> + <a class="brand" href="#">rpki.net {% rpki_version %}</a> + <ul class='nav'> + <li class="active"> + <a href="#">Home</a> + </li> + <li class="divider-vertical"></li> + {% if user.is_authenticated %} + <li><p class="navbar-text">Logged in as {{ user }}</li> + <li class="divider-vertical"></li> + <li><a href="{% url "rpki.gui.views.logout" %}">Log Out</a></li> + {% endif %} + </ul> + <ul class="nav 
pull-right"> + <li><a href="https://trac.rpki.net/wiki/doc/RPKI/CA/UI/GUI">Help</a></li> + </ul> + </div> + </div> + </div><!-- topbar --> + + <div class="container-fluid"> + <!-- MAIN CONTENT --> + <div class="row-fluid"> + <div class="span2"> + {% block sidebar %}{% endblock %} + </div> + + <div class="span10"> + {% if messages %} + {% for message in messages %} + {# this will break if there is more than one tag, but don't expect to use that feature #} + <div class='alert alert-{{ message.tags }}'> + {{ message }} + </div> + {% endfor %} + {% endif %} + {% block content %}{% endblock %} + </div> + </div> + + </body> +</html> diff --git a/rpki/gui/app/templates/registration/login.html b/rpki/gui/app/templates/registration/login.html new file mode 100644 index 00000000..0d6fb6fd --- /dev/null +++ b/rpki/gui/app/templates/registration/login.html @@ -0,0 +1,25 @@ +{% extends "base.html" %} +{% load url from future %} + +{% block content %} +<div class="page-header"> + <h1>Login</h1> +</div> + +{% if form.errors %} +<div class='alert'> + <p>Your username and password didn't match. Please try again.</p> +</div> +{% endif %} + +<form class="form-horizontal" method="post" action="{% url "rpki.gui.views.login" %}"> + {% csrf_token %} + {% include "app/bootstrap_form.html" %} + + <input type="hidden" name="next" value="{{ next }}" /> + <div class="form-actions"> + <input type="submit" value="Login" class="btn btn-primary" /> + </div> +</form> + +{% endblock %} diff --git a/rpki/gui/app/templatetags/__init__.py b/rpki/gui/app/templatetags/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/rpki/gui/app/templatetags/__init__.py diff --git a/rpki/gui/app/templatetags/app_extras.py b/rpki/gui/app/templatetags/app_extras.py new file mode 100644 index 00000000..2bde9bc2 --- /dev/null +++ b/rpki/gui/app/templatetags/app_extras.py @@ -0,0 +1,58 @@ +from django import template + +register = template.Library() + + +@register.simple_tag +def verbose_name(obj): + "Return the model class' verbose name." + return obj._meta.verbose_name.capitalize() + + +@register.simple_tag +def verbose_name_plural(qs): + "Return the verbose name for the model class." 
+ return qs.model._meta.verbose_name_plural.capitalize() + +css = { + 'valid': 'label-success', + 'invalid': 'label-important' +} + + +@register.simple_tag +def validity_label(validity): + return '<span class="label %s">%s</span>' % (css.get(validity, ''), validity) + + +@register.simple_tag +def severity_class(severity): + css = { + 0: 'label-info', + 1: 'label-warning', + 2: 'label-important', + } + return css.get(severity) + + +@register.simple_tag +def alert_count(conf): + qs = conf.alerts.filter(seen=False) + unread = len(qs) + if unread: + severity = max([x.severity for x in qs]) + css = { + 0: 'badge-info', + 1: 'badge-warning', + 2: 'badge-important' + } + css_class = css.get(severity) + else: + css_class = 'badge-default' + return u'<span class="badge %s">%d</span>' % (css_class, unread) + + +@register.simple_tag +def rpki_version(): + import rpki.version + return rpki.version.VERSION diff --git a/rpki/gui/app/templatetags/bootstrap_pager.py b/rpki/gui/app/templatetags/bootstrap_pager.py new file mode 100644 index 00000000..bae8445a --- /dev/null +++ b/rpki/gui/app/templatetags/bootstrap_pager.py @@ -0,0 +1,55 @@ +from django import template + +register = template.Library() + + +class BootstrapPagerNode(template.Node): + def __init__(self, request, pager_object): + self.request = template.Variable(request) + self.pager_object = template.Variable(pager_object) + + def render(self, context): + request = self.request.resolve(context) + pager_object = self.pager_object.resolve(context) + if pager_object.paginator.num_pages == 1: + return '' + r = ['<div class="pagination"><ul>'] + if pager_object.number == 1: + r.append('<li class="disabled"><a>«</a></li>') + else: + r.append('<li><a href="%s?page=%d">«</a></li>' % (request.path, pager_object.number - 1)) + + # display at most 5 pages around the current page + min_page = max(pager_object.number - 2, 1) + max_page = min(min_page + 5, pager_object.paginator.num_pages) + + if min_page > 1: + r.append('<li><a href="%s">1</a></li>' % request.path) + r.append('<li class="disabled"><a>…</a></li>') + + for i in range(min_page, max_page + 1): + r.append('<li %s><a href="%s?page=%d">%d</a></li>' % ('' if i != pager_object.number else 'class="active"', request.path, i, i)) + + if max_page < pager_object.paginator.num_pages: + r.append('<li class="disabled"><a>…</a></li>') + r.append('<li><a href="%(path)s?page=%(page)d">%(page)d</a></li>' % + {'path': request.path, + 'page': pager_object.paginator.num_pages}) + + if pager_object.number < pager_object.paginator.num_pages: + r.append('<li><a href="%s?page=%d">»</a></li>' % (request.path, pager_object.number + 1)) + else: + r.append('<li class="disabled"><a>»</a></li>') + + + r.append('</ul></div>') + return '\n'.join(r) + + +@register.tag +def bootstrap_pager(parser, token): + try: + tag_name, request, pager_object = token.split_contents() + except ValueError: + raise template.TemplateSyntaxError("%r tag requires two arguments" % token.contents.split()[0]) + return BootstrapPagerNode(request, pager_object) diff --git a/rpki/gui/app/timestamp.py b/rpki/gui/app/timestamp.py new file mode 100644 index 00000000..959f2025 --- /dev/null +++ b/rpki/gui/app/timestamp.py @@ -0,0 +1,25 @@ +# $Id$ +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# + +import models +from datetime import datetime + +def update(name): + "Set the timestamp value for the given name to the current time." + q = models.Timestamp.objects.filter(name=name) + obj = q[0] if q else models.Timestamp(name=name) + obj.ts = datetime.utcnow() + obj.save() diff --git a/rpki/gui/app/urls.py b/rpki/gui/app/urls.py new file mode 100644 index 00000000..92e90b0e --- /dev/null +++ b/rpki/gui/app/urls.py @@ -0,0 +1,81 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +__version__ = '$Id$' + +from django.conf.urls import patterns, url +from rpki.gui.app import views + +urlpatterns = patterns( + '', + (r'^$', views.dashboard), + url(r'^alert/$', views.AlertListView.as_view(), name='alert-list'), + url(r'^alert/clear_all$', views.alert_clear_all, name='alert-clear-all'), + url(r'^alert/(?P<pk>\d+)/$', views.AlertDetailView.as_view(), + name='alert-detail'), + url(r'^alert/(?P<pk>\d+)/delete$', views.AlertDeleteView.as_view(), + name='alert-delete'), + (r'^conf/export$', views.conf_export), + (r'^conf/list$', views.conf_list), + (r'^conf/select$', views.conf_select), + url(r'^conf/export_asns$', views.export_asns, name='export-asns'), + url(r'^conf/export_prefixes$', views.export_prefixes, name='export-prefixes'), + url(r'^conf/import_asns$', views.import_asns, name='import-asns'), + url(r'^conf/import_prefixes$', views.import_prefixes, name='import-prefixes'), + (r'^parent/import$', views.parent_import), + (r'^parent/(?P<pk>\d+)/$', views.parent_detail), + (r'^parent/(?P<pk>\d+)/delete$', views.parent_delete), + (r'^parent/(?P<pk>\d+)/export$', views.parent_export), + (r'^child/import$', views.child_import), + (r'^child/(?P<pk>\d+)/$', views.child_detail), + (r'^child/(?P<pk>\d+)/add_address$', views.child_add_prefix), + (r'^child/(?P<pk>\d+)/add_asn$', views.child_add_asn), + (r'^child/(?P<pk>\d+)/delete$', views.child_delete), + (r'^child/(?P<pk>\d+)/edit$', views.child_edit), + (r'^child/(?P<pk>\d+)/export$', views.child_response), + url(r'^gbr/create$', views.ghostbuster_create, name='gbr-create'), + url(r'^gbr/(?P<pk>\d+)/$', views.GhostbusterDetailView.as_view(), name='gbr-detail'), + url(r'^gbr/(?P<pk>\d+)/edit$', views.ghostbuster_edit, name='gbr-edit'), + url(r'^gbr/(?P<pk>\d+)/delete$', views.ghostbuster_delete, name='gbr-delete'), + (r'^refresh$', views.refresh), + (r'^client/import$', views.client_import), + (r'^client/$', views.client_list), + (r'^client/(?P<pk>\d+)/$', views.client_detail), + (r'^client/(?P<pk>\d+)/delete$', views.client_delete), + url(r'^client/(?P<pk>\d+)/export$', views.client_export, name='client-export'), + (r'^repo/import$', views.repository_import), + (r'^repo/(?P<pk>\d+)/$', views.repository_detail), + (r'^repo/(?P<pk>\d+)/delete$', views.repository_delete), + (r'^resource_holder/$', views.resource_holder_list), + (r'^resource_holder/create$', views.resource_holder_create), + (r'^resource_holder/(?P<pk>\d+)/delete$', views.resource_holder_delete), + (r'^resource_holder/(?P<pk>\d+)/edit$', views.resource_holder_edit), + (r'^roa/(?P<pk>\d+)/$', views.roa_detail), + (r'^roa/create$', views.roa_create), + (r'^roa/create_multi$', views.roa_create_multi), + (r'^roa/confirm$', views.roa_create_confirm), + (r'^roa/confirm_multi$', views.roa_create_multi_confirm), + url(r'^roa/export$', views.roa_export, name='roa-export'), + url(r'^roa/import$', views.roa_import, name='roa-import'), + (r'^roa/(?P<pk>\d+)/delete$', views.roa_delete), + url(r'^roa/(?P<pk>\d+)/clone$', views.roa_clone, name="roa-clone"), + (r'^route/$', views.route_view), + (r'^route/(?P<pk>\d+)/$', views.route_detail), + url(r'^route/suggest$', views.route_suggest, name="suggest-roas"), + (r'^user/$', views.user_list), + (r'^user/create$', views.user_create), + (r'^user/(?P<pk>\d+)/delete$', views.user_delete), + (r'^user/(?P<pk>\d+)/edit$', views.user_edit), +) diff --git a/rpki/gui/app/views.py b/rpki/gui/app/views.py new file mode 100644 index 00000000..db4cf0c1 --- /dev/null +++ b/rpki/gui/app/views.py @@ -0,0 +1,1314 @@ +# Copyright (C) 2010, 2011 
SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +This module contains the view functions implementing the web portal +interface. + +""" + +__version__ = '$Id$' + +import os +import os.path +from tempfile import NamedTemporaryFile +import cStringIO +import csv +import logging + +from django.utils.decorators import method_decorator +from django.contrib.auth.decorators import login_required +from django.shortcuts import get_object_or_404, render, redirect +from django.utils.http import urlquote +from django import http +from django.core.urlresolvers import reverse, reverse_lazy +from django.contrib.auth.models import User +from django.views.generic import DetailView, ListView, DeleteView +from django.core.paginator import Paginator, InvalidPage +from django.forms.formsets import formset_factory, BaseFormSet +import django.db.models +from django.contrib import messages + +from rpki.irdb import Zookeeper, ChildASN, ChildNet, ROARequestPrefix +from rpki.gui.app import models, forms, glue, range_list +from rpki.resource_set import (resource_range_as, resource_range_ip, + roa_prefix_ipv4) +from rpki import sundial +import rpki.exceptions + +from rpki.gui.cacheview.models import ROA +from rpki.gui.routeview.models import RouteOrigin +from rpki.gui.decorators import tls_required + +logger = logging.getLogger(__name__) + + +def superuser_required(f): + """Decorator which returns HttpResponseForbidden if the user does + not have superuser permissions. + + """ + @login_required + def _wrapped(request, *args, **kwargs): + if not request.user.is_superuser: + return http.HttpResponseForbidden() + return f(request, *args, **kwargs) + return _wrapped + + +def handle_required(f): + """Decorator for view functions which require the user to be logged in and + a resource handle selected for the session. + + """ + @login_required + @tls_required + def wrapped_fn(request, *args, **kwargs): + if 'handle' not in request.session: + if request.user.is_superuser: + conf = models.Conf.objects.all() + else: + conf = models.Conf.objects.filter(confacl__user=request.user) + + if conf.count() == 1: + request.session['handle'] = conf[0] + elif conf.count() == 0: + return render(request, 'app/conf_empty.html', {}) + else: + url = '%s?next=%s' % (reverse(conf_list), + urlquote(request.get_full_path())) + return http.HttpResponseRedirect(url) + + return f(request, *args, **kwargs) + return wrapped_fn + + +@handle_required +def generic_import(request, queryset, configure, form_class=None, + post_import_redirect=None): + """ + Generic view function for importing XML files used in the setup + process. 
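+
+    On GET this simply renders an upload form.  On POST the uploaded XML is
+    written to a temporary file, handed to the given Zookeeper configure
+    method, and the user is redirected once rpkid has been poked.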
+ + queryset + queryset containing all objects of the type being imported + + configure + method on Zookeeper to invoke with the imported XML file + + form_class + specifies the form to use for import. If None, uses the generic + forms.ImportForm. + + post_import_redirect + if None (default), the user will be redirected to the detail page for + the imported object. Otherwise, the user will be redirected to the + specified URL. + + """ + conf = request.session['handle'] + if form_class is None: + form_class = forms.ImportForm + if request.method == 'POST': + form = form_class(request.POST, request.FILES) + if form.is_valid(): + tmpf = NamedTemporaryFile(prefix='import', suffix='.xml', + delete=False) + tmpf.write(form.cleaned_data['xml'].read()) + tmpf.close() + z = Zookeeper(handle=conf.handle) + handle = form.cleaned_data.get('handle') + # CharField uses an empty string for the empty value, rather than + # None. Convert to none in this case, since configure_child/parent + # expects it. + if handle == '': + handle = None + # configure_repository returns None, so can't use tuple expansion + # here. Unpack the tuple below if post_import_redirect is None. + r = configure(z, tmpf.name, handle) + # force rpkid run now + z.synchronize_ca(poke=True) + os.remove(tmpf.name) + if post_import_redirect: + url = post_import_redirect + else: + _, handle = r + url = queryset.get(issuer=conf, + handle=handle).get_absolute_url() + return http.HttpResponseRedirect(url) + else: + form = form_class() + + return render(request, 'app/app_form.html', { + 'form': form, + 'form_title': 'Import ' + queryset.model._meta.verbose_name.capitalize(), + }) + + +@handle_required +def dashboard(request): + conf = request.session['handle'] + + used_asns = range_list.RangeList() + + # asns used in my roas + qs = models.ROARequest.objects.filter(issuer=conf) + roa_asns = set((obj.asn for obj in qs)) + used_asns.extend((resource_range_as(asn, asn) for asn in roa_asns)) + + # asns given to my children + child_asns = ChildASN.objects.filter(child__in=conf.children.all()) + used_asns.extend((resource_range_as(obj.start_as, obj.end_as) for obj in child_asns)) + + # my received asns + asns = models.ResourceRangeAS.objects.filter(cert__conf=conf) + my_asns = range_list.RangeList([resource_range_as(obj.min, obj.max) for obj in asns]) + + unused_asns = my_asns.difference(used_asns) + + used_prefixes = range_list.RangeList() + used_prefixes_v6 = range_list.RangeList() + + # prefixes used in my roas + for obj in models.ROARequestPrefix.objects.filter(roa_request__issuer=conf, + version='IPv4'): + used_prefixes.append(obj.as_resource_range()) + + for obj in models.ROARequestPrefix.objects.filter(roa_request__issuer=conf, + version='IPv6'): + used_prefixes_v6.append(obj.as_resource_range()) + + # prefixes given to my children + for obj in ChildNet.objects.filter(child__in=conf.children.all(), + version='IPv4'): + used_prefixes.append(obj.as_resource_range()) + + for obj in ChildNet.objects.filter(child__in=conf.children.all(), + version='IPv6'): + used_prefixes_v6.append(obj.as_resource_range()) + + # my received prefixes + prefixes = models.ResourceRangeAddressV4.objects.filter(cert__conf=conf).all() + prefixes_v6 = models.ResourceRangeAddressV6.objects.filter(cert__conf=conf).all() + my_prefixes = range_list.RangeList([obj.as_resource_range() for obj in prefixes]) + my_prefixes_v6 = range_list.RangeList([obj.as_resource_range() for obj in prefixes_v6]) + + unused_prefixes = my_prefixes.difference(used_prefixes) + # monkey-patch each 
object with a boolean value indicating whether or not + # it is a prefix. We have to do this here because in the template there is + # no way to catch the MustBePrefix exception. + for x in unused_prefixes: + try: + x.prefixlen() + x.is_prefix = True + except rpki.exceptions.MustBePrefix: + x.is_prefix = False + + unused_prefixes_v6 = my_prefixes_v6.difference(used_prefixes_v6) + for x in unused_prefixes_v6: + try: + x.prefixlen() + x.is_prefix = True + except rpki.exceptions.MustBePrefix: + x.is_prefix = False + + clients = models.Client.objects.all() if request.user.is_superuser else None + + return render(request, 'app/dashboard.html', { + 'conf': conf, + 'unused_asns': unused_asns, + 'unused_prefixes': unused_prefixes, + 'unused_prefixes_v6': unused_prefixes_v6, + 'asns': asns, + 'prefixes': prefixes, + 'prefixes_v6': prefixes_v6, + 'clients': clients, + }) + + +@login_required +def conf_list(request, **kwargs): + """Allow the user to select a handle.""" + log = request.META['wsgi.errors'] + next_url = request.GET.get('next', reverse(dashboard)) + if request.user.is_superuser: + qs = models.Conf.objects.all() + else: + qs = models.Conf.objects.filter(confacl__user=request.user) + return render(request, 'app/conf_list.html', { + 'conf_list': qs, + 'next_url': next_url + }) + + +@login_required +def conf_select(request): + """Change the handle for the current session.""" + if not 'handle' in request.GET: + return redirect(conf_list) + handle = request.GET['handle'] + next_url = request.GET.get('next', reverse(dashboard)) + if request.user.is_superuser: + request.session['handle'] = get_object_or_404(models.Conf, handle=handle) + else: + request.session['handle'] = get_object_or_404( + models.Conf, confacl__user=request.user, handle=handle + ) + return http.HttpResponseRedirect(next_url) + + +def serve_xml(content, basename, ext='xml'): + """ + Generate a HttpResponse object with the content type set to XML. + + `content` is a string. + + `basename` is the prefix to specify for the XML filename. 
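+
+    For example, serve_xml(xml, 'myhandle.identity') would produce an
+    attachment named myhandle.identity.xml.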
+ + `csv` is the type (default: xml) + + """ + resp = http.HttpResponse(content, mimetype='application/%s' % ext) + resp['Content-Disposition'] = 'attachment; filename=%s.%s' % (basename, ext) + return resp + + +@handle_required +def conf_export(request): + """Return the identity.xml for the current handle.""" + conf = request.session['handle'] + z = Zookeeper(handle=conf.handle) + xml = z.generate_identity() + return serve_xml(str(xml), '%s.identity' % conf.handle) + + +@handle_required +def export_asns(request): + """Export CSV file containing ASN allocations to children.""" + conf = request.session['handle'] + s = cStringIO.StringIO() + csv_writer = csv.writer(s, delimiter=' ') + for childasn in ChildASN.objects.filter(child__issuer=conf): + csv_writer.writerow([childasn.child.handle, str(childasn.as_resource_range())]) + return serve_xml(s.getvalue(), '%s.asns' % conf.handle, ext='csv') + + +@handle_required +def import_asns(request): + conf = request.session['handle'] + if request.method == 'POST': + form = forms.ImportCSVForm(request.POST, request.FILES) + if form.is_valid(): + f = NamedTemporaryFile(prefix='asns', suffix='.csv', delete=False) + f.write(request.FILES['csv'].read()) + f.close() + z = Zookeeper(handle=conf.handle) + z.load_asns(f.name) + z.run_rpkid_now() + os.unlink(f.name) + messages.success(request, 'Successfully imported AS delgations from CSV file.') + return redirect(dashboard) + else: + form = forms.ImportCSVForm() + return render(request, 'app/import_resource_form.html', { + 'form_title': 'Import CSV containing ASN delegations', + 'form': form, + 'cancel_url': reverse(dashboard) + }) + + +@handle_required +def export_prefixes(request): + """Export CSV file containing ASN allocations to children.""" + conf = request.session['handle'] + s = cStringIO.StringIO() + csv_writer = csv.writer(s, delimiter=' ') + for childnet in ChildNet.objects.filter(child__issuer=conf): + csv_writer.writerow([childnet.child.handle, str(childnet.as_resource_range())]) + return serve_xml(s.getvalue(), '%s.prefixes' % conf.handle, ext='csv') + + +@handle_required +def import_prefixes(request): + conf = request.session['handle'] + if request.method == 'POST': + form = forms.ImportCSVForm(request.POST, request.FILES) + if form.is_valid(): + f = NamedTemporaryFile(prefix='prefixes', suffix='.csv', delete=False) + f.write(request.FILES['csv'].read()) + f.close() + z = Zookeeper(handle=conf.handle) + z.load_prefixes(f.name) + z.run_rpkid_now() + os.unlink(f.name) + messages.success(request, 'Successfully imported prefix delegations from CSV file.') + return redirect(dashboard) + else: + form = forms.ImportCSVForm() + return render(request, 'app/import_resource_form.html', { + 'form_title': 'Import CSV containing Prefix delegations', + 'form': form, + 'cancel_url': reverse(dashboard) + }) + + +@handle_required +def parent_import(request): + conf = request.session['handle'] + return generic_import(request, conf.parents, Zookeeper.configure_parent) + + +@handle_required +def parent_detail(request, pk): + return render(request, 'app/parent_detail.html', { + 'object': get_object_or_404(request.session['handle'].parents, pk=pk)}) + + +@handle_required +def parent_delete(request, pk): + conf = request.session['handle'] + obj = get_object_or_404(conf.parents, pk=pk) # confirm permission + log = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + z = Zookeeper(handle=conf.handle, logstream=log) + 
z.delete_parent(obj.handle) + z.synchronize_ca() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.Empty() + return render(request, 'app/object_confirm_delete.html', { + 'object': obj, + 'form': form, + 'parent_template': 'app/parent_detail.html' + }) + + +@handle_required +def parent_export(request, pk): + """Export XML repository request for a given parent.""" + conf = request.session['handle'] + parent = get_object_or_404(conf.parents, pk=pk) + z = Zookeeper(handle=conf.handle) + xml = z.generate_repository_request(parent) + return serve_xml(str(xml), '%s.repository' % parent.handle) + + +@handle_required +def child_import(request): + conf = request.session['handle'] + return generic_import(request, conf.children, Zookeeper.configure_child) + + +@handle_required +def child_add_prefix(request, pk): + logstream = request.META['wsgi.errors'] + conf = request.session['handle'] + child = get_object_or_404(conf.children, pk=pk) + if request.method == 'POST': + form = forms.AddNetForm(request.POST, child=child) + if form.is_valid(): + address_range = form.cleaned_data.get('address_range') + r = resource_range_ip.parse_str(address_range) + version = 'IPv%d' % r.version + child.address_ranges.create(start_ip=str(r.min), end_ip=str(r.max), + version=version) + Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() + return http.HttpResponseRedirect(child.get_absolute_url()) + else: + form = forms.AddNetForm(child=child) + return render(request, 'app/app_form.html', + {'object': child, 'form': form, 'form_title': 'Add Prefix'}) + + +@handle_required +def child_add_asn(request, pk): + logstream = request.META['wsgi.errors'] + conf = request.session['handle'] + child = get_object_or_404(conf.children, pk=pk) + if request.method == 'POST': + form = forms.AddASNForm(request.POST, child=child) + if form.is_valid(): + asns = form.cleaned_data.get('asns') + r = resource_range_as.parse_str(asns) + child.asns.create(start_as=r.min, end_as=r.max) + Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() + return http.HttpResponseRedirect(child.get_absolute_url()) + else: + form = forms.AddASNForm(child=child) + return render(request, 'app/app_form.html', + {'object': child, 'form': form, 'form_title': 'Add ASN'}) + + +@handle_required +def child_detail(request, pk): + child = get_object_or_404(request.session['handle'].children, pk=pk) + return render(request, 'app/child_detail.html', {'object': child}) + + +@handle_required +def child_edit(request, pk): + """Edit the end validity date for a resource handle's child.""" + log = request.META['wsgi.errors'] + conf = request.session['handle'] + child = get_object_or_404(conf.children.all(), pk=pk) + form_class = forms.ChildForm(child) + if request.method == 'POST': + form = form_class(request.POST, request.FILES) + if form.is_valid(): + child.valid_until = sundial.datetime.from_datetime(form.cleaned_data.get('valid_until')) + child.save() + # remove AS & prefixes that are not selected in the form + models.ChildASN.objects.filter(child=child).exclude(pk__in=form.cleaned_data.get('as_ranges')).delete() + models.ChildNet.objects.filter(child=child).exclude(pk__in=form.cleaned_data.get('address_ranges')).delete() + Zookeeper(handle=conf.handle, logstream=log).run_rpkid_now() + return http.HttpResponseRedirect(child.get_absolute_url()) + else: + form = form_class(initial={ + 'as_ranges': child.asns.all(), + 'address_ranges': child.address_ranges.all()}) + + return render(request, 'app/app_form.html', { + 'object': 
child, + 'form': form, + 'form_title': 'Edit Child: ' + child.handle, + }) + + +@handle_required +def child_response(request, pk): + """ + Export the XML file containing the output of the configure_child + to send back to the client. + + """ + conf = request.session['handle'] + child = get_object_or_404(models.Child, issuer=conf, pk=pk) + z = Zookeeper(handle=conf.handle) + xml = z.generate_parental_response(child) + resp = serve_xml(str(xml), child.handle) + return resp + + +@handle_required +def child_delete(request, pk): + logstream = request.META['wsgi.errors'] + conf = request.session['handle'] + child = get_object_or_404(conf.children, pk=pk) + if request.method == 'POST': + form = forms.Empty(request.POST) + if form.is_valid(): + z = Zookeeper(handle=conf.handle, logstream=logstream) + z.delete_child(child.handle) + z.synchronize_ca() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.Empty() + return render(request, 'app/object_confirm_delete.html', { + 'object': child, + 'form': form, + 'parent_template': 'app/child_detail.html' + }) + + +@handle_required +def roa_detail(request, pk): + conf = request.session['handle'] + obj = get_object_or_404(conf.roas, pk=pk) + return render(request, 'app/roa_detail.html', {'object': obj}) + + +def get_covered_routes(rng, max_prefixlen, asn): + """Returns a list of routeview.models.RouteOrigin objects which would + change validation status if a ROA were created with the parameters to this + function. + + A "newstatus" attribute is monkey-patched on the RouteOrigin objects which + can be used in the template. "status" remains the current validation + status of the object. + + """ + + # find all routes that match or are completed covered by the proposed new roa + qs = RouteOrigin.objects.filter( + prefix_min__gte=rng.min, + prefix_max__lte=rng.max + ) + routes = [] + for route in qs: + status = route.status + # tweak the validation status due to the presence of the + # new ROA. Don't need to check the prefix bounds here + # because all the matches routes will be covered by this + # new ROA + if status == 'unknown': + # if the route was previously unknown (no covering + # ROAs), then: + # if the AS matches, it is valid, otherwise invalid + if (route.asn != 0 and route.asn == asn and route.prefixlen <= max_prefixlen): + route.newstatus = 'valid' + else: + route.newstatus = 'invalid' + routes.append(route) + elif status == 'invalid': + # if the route was previously invalid, but this new ROA + # matches the ASN, it is now valid + if route.asn != 0 and route.asn == asn and route.prefixlen <= max_prefixlen: + route.newstatus = 'valid' + routes.append(route) + + return routes + + +@handle_required +def roa_create(request): + """Present the user with a form to create a ROA. + + Doesn't use the generic create_object() form because we need to + create both the ROARequest and ROARequestPrefix objects. 
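+
+    On GET the form may be pre-populated from ?asn= and ?prefix= query
+    parameters.  A valid POST does not create anything yet; it renders a
+    confirmation page listing the routes whose validation status would change.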
+ + """ + + conf = request.session['handle'] + if request.method == 'POST': + form = forms.ROARequest(request.POST, request.FILES, conf=conf) + if form.is_valid(): + asn = form.cleaned_data.get('asn') + rng = form._as_resource_range() # FIXME calling "private" method + max_prefixlen = int(form.cleaned_data.get('max_prefixlen')) + + routes = get_covered_routes(rng, max_prefixlen, asn) + + prefix = str(rng) + form = forms.ROARequestConfirm(initial={'asn': asn, + 'prefix': prefix, + 'max_prefixlen': max_prefixlen}) + return render(request, 'app/roarequest_confirm_form.html', + {'form': form, + 'asn': asn, + 'prefix': prefix, + 'max_prefixlen': max_prefixlen, + 'routes': routes}) + else: + # pull initial values from query parameters + d = {} + for s in ('asn', 'prefix'): + if s in request.GET: + d[s] = request.GET[s] + form = forms.ROARequest(initial=d) + + return render(request, 'app/roarequest_form.html', {'form': form}) + + +class ROARequestFormSet(BaseFormSet): + """There is no way to pass arbitrary keyword arguments to the form + constructor, so we have to override BaseFormSet to allow it. + + """ + def __init__(self, *args, **kwargs): + self.conf = kwargs.pop('conf') + super(ROARequestFormSet, self).__init__(*args, **kwargs) + + def _construct_forms(self): + self.forms = [] + for i in xrange(self.total_form_count()): + self.forms.append(self._construct_form(i, conf=self.conf)) + + +def split_with_default(s): + xs = s.split(',') + if len(xs) == 1: + return xs[0], None + return xs + + +@handle_required +def roa_create_multi(request): + """version of roa_create that uses a formset to allow entry of multiple + roas on a single page. + + ROAs can be specified in the GET query string, as such: + + ?roa=prefix,asn + + Mulitple ROAs may be specified: + + ?roa=prefix,asn+roa=prefix2,asn2 + + If an IP range is specified, it will be automatically split into multiple + prefixes: + + ?roa=1.1.1.1-2.2.2.2,42 + + The ASN may optionally be omitted. + + """ + + conf = request.session['handle'] + if request.method == 'GET': + init = [] + for x in request.GET.getlist('roa'): + rng, asn = split_with_default(x) + rng = resource_range_ip.parse_str(rng) + if rng.can_be_prefix: + init.append({'asn': asn, 'prefix': str(rng)}) + else: + v = [] + rng.chop_into_prefixes(v) + init.extend([{'asn': asn, 'prefix': str(p)} for p in v]) + formset = formset_factory(forms.ROARequest, formset=ROARequestFormSet, + can_delete=True)(initial=init, conf=conf) + elif request.method == 'POST': + formset = formset_factory(forms.ROARequest, formset=ROARequestFormSet, + extra=0, can_delete=True)(request.POST, request.FILES, conf=conf) + if formset.is_valid(): + routes = [] + v = [] + # as of Django 1.4.5 we still can't use formset.cleaned_data + # because deleted forms are not excluded, which causes an + # AttributeError to be raised. + for form in formset: + if hasattr(form, 'cleaned_data') and form.cleaned_data: # exclude empty forms + asn = form.cleaned_data.get('asn') + rng = resource_range_ip.parse_str(form.cleaned_data.get('prefix')) + max_prefixlen = int(form.cleaned_data.get('max_prefixlen')) + # FIXME: This won't do the right thing in the event that a + # route is covered by multiple ROAs created in the form. + # You will see duplicate entries, each with a potentially + # different validation status. 
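+                # One way to collapse such duplicates (not attempted here)
+                # would be to key the collected routes by primary key and keep
+                # only the most severe predicted status for each route.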
+ routes.extend(get_covered_routes(rng, max_prefixlen, asn)) + v.append({'prefix': str(rng), 'max_prefixlen': max_prefixlen, + 'asn': asn}) + # if there were no rows, skip the confirmation step + if v: + formset = formset_factory(forms.ROARequestConfirm, extra=0)(initial=v) + return render(request, 'app/roarequest_confirm_multi_form.html', + {'routes': routes, 'formset': formset, 'roas': v}) + return render(request, 'app/roarequest_multi_form.html', + {'formset': formset}) + + +@handle_required +def roa_create_confirm(request): + """This function is called when the user confirms the creation of a ROA + request. It is responsible for updating the IRDB. + + """ + conf = request.session['handle'] + log = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.ROARequestConfirm(request.POST, request.FILES) + if form.is_valid(): + asn = form.cleaned_data.get('asn') + prefix = form.cleaned_data.get('prefix') + rng = resource_range_ip.parse_str(prefix) + max_prefixlen = form.cleaned_data.get('max_prefixlen') + # Always create ROA requests with a single prefix. + # https://trac.rpki.net/ticket/32 + roa = models.ROARequest.objects.create(issuer=conf, asn=asn) + v = 'IPv%d' % rng.version + roa.prefixes.create(version=v, prefix=str(rng.min), + prefixlen=rng.prefixlen(), + max_prefixlen=max_prefixlen) + Zookeeper(handle=conf.handle, logstream=log).run_rpkid_now() + return http.HttpResponseRedirect(reverse(dashboard)) + # What should happen when the submission form isn't valid? For now + # just fall through and redirect back to the ROA creation form + return http.HttpResponseRedirect(reverse(roa_create)) + + +@handle_required +def roa_create_multi_confirm(request): + """This function is called when the user confirms the creation of a ROA + request. It is responsible for updating the IRDB. + + """ + conf = request.session['handle'] + log = request.META['wsgi.errors'] + if request.method == 'POST': + formset = formset_factory(forms.ROARequestConfirm, extra=0)(request.POST, request.FILES) + if formset.is_valid(): + for cleaned_data in formset.cleaned_data: + asn = cleaned_data.get('asn') + prefix = cleaned_data.get('prefix') + rng = resource_range_ip.parse_str(prefix) + max_prefixlen = cleaned_data.get('max_prefixlen') + # Always create ROA requests with a single prefix. + # https://trac.rpki.net/ticket/32 + roa = models.ROARequest.objects.create(issuer=conf, asn=asn) + v = 'IPv%d' % rng.version + roa.prefixes.create(version=v, prefix=str(rng.min), + prefixlen=rng.prefixlen(), + max_prefixlen=max_prefixlen) + Zookeeper(handle=conf.handle, logstream=log).run_rpkid_now() + return redirect(dashboard) + # What should happen when the submission form isn't valid? For now + # just fall through and redirect back to the ROA creation form + return http.HttpResponseRedirect(reverse(roa_create_multi)) + + +@handle_required +def roa_delete(request, pk): + """Handles deletion of a single ROARequest object. + + Uses a form for double confirmation, displaying how the route + validation status may change as a result. 
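+
+    The predicted status shown for each covered route is computed as if the
+    ROA being deleted no longer existed, i.e. against all of the other ROAs
+    that cover that route.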
+ + """ + + conf = request.session['handle'] + roa = get_object_or_404(conf.roas, pk=pk) + if request.method == 'POST': + roa.delete() + Zookeeper(handle=conf.handle).run_rpkid_now() + return redirect(reverse(dashboard)) + + ### Process GET ### + + # note: assumes we only generate one prefix per ROA + roa_prefix = roa.prefixes.all()[0] + rng = roa_prefix.as_resource_range() + + routes = [] + for route in roa.routes: + # select all roas which cover this route + # excluding the current roa + # note: we can't identify the exact ROA here, because we only know what + # was requested to rpkid + roas = route.roas.exclude( + asid=roa.asn, + prefixes__prefix_min=rng.min, + prefixes__prefix_max=rng.max, + prefixes__max_length=roa_prefix.max_prefixlen + ) + + # subselect exact match + if route.asn != 0 and roas.filter(asid=route.asn, + prefixes__max_length__gte=route.prefixlen).exists(): + route.newstatus = 'valid' + elif roas.exists(): + route.newstatus = 'invalid' + else: + route.newstatus = 'unknown' + # we may want to ignore routes for which there is no status change, + # but the user may want to see that nothing has changed explicitly + routes.append(route) + + return render(request, 'app/roarequest_confirm_delete.html', + {'object': roa, 'routes': routes}) + + +@handle_required +def roa_clone(request, pk): + conf = request.session['handle'] + roa = get_object_or_404(conf.roas, pk=pk) + return redirect( + reverse(roa_create_multi) + "?roa=" + str(roa.prefixes.all()[0].as_roa_prefix()) + ) + + +@handle_required +def roa_import(request): + """Import CSV containing ROA declarations.""" + if request.method == 'POST': + form = forms.ImportCSVForm(request.POST, request.FILES) + if form.is_valid(): + import tempfile + tmp = tempfile.NamedTemporaryFile(suffix='.csv', prefix='roas', delete=False) + tmp.write(request.FILES['csv'].read()) + tmp.close() + z = Zookeeper(handle=request.session['handle']) + z.load_roa_requests(tmp.name) + z.run_rpkid_now() + os.unlink(tmp.name) + messages.success(request, 'Successfully imported ROAs.') + return redirect(dashboard) + else: + form = forms.ImportCSVForm() + return render(request, 'app/import_resource_form.html', { + 'form_title': 'Import ROAs from CSV', + 'form': form, + 'cancel_url': reverse(dashboard) + }) + + +@handle_required +def roa_export(request): + """Export CSV containing ROA declarations.""" + # FIXME: remove when Zookeeper can do this + f = cStringIO.StringIO() + csv_writer = csv.writer(f, delimiter=' ') + conf = request.session['handle'] + # each roa prefix gets a unique group so rpkid will issue separate roas + for group, roapfx in enumerate(ROARequestPrefix.objects.filter(roa_request__issuer=conf)): + csv_writer.writerow([str(roapfx.as_roa_prefix()), roapfx.roa_request.asn, '%s-%d' % (conf.handle, group)]) + resp = http.HttpResponse(f.getvalue(), mimetype='application/csv') + resp['Content-Disposition'] = 'attachment; filename=roas.csv' + return resp + + +class GhostbusterDetailView(DetailView): + def get_queryset(self): + return self.request.session['handle'].ghostbusters + + +@handle_required +def ghostbuster_delete(request, pk): + conf = request.session['handle'] + logstream = request.META['wsgi.errors'] + obj = get_object_or_404(conf.ghostbusters, pk=pk) + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + obj.delete() + Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.Empty(request.POST, 
request.FILES) + return render(request, 'app/object_confirm_delete.html', { + 'object': obj, + 'form': form, + 'parent_template': 'app/ghostbusterrequest_detail.html' + }) + + +@handle_required +def ghostbuster_create(request): + conf = request.session['handle'] + logstream = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.GhostbusterRequestForm(request.POST, request.FILES, + conf=conf) + if form.is_valid(): + obj = form.save(commit=False) + obj.vcard = glue.ghostbuster_to_vcard(obj) + obj.save() + Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.GhostbusterRequestForm(conf=conf) + return render(request, 'app/app_form.html', + {'form': form, 'form_title': 'New Ghostbuster Request'}) + + +@handle_required +def ghostbuster_edit(request, pk): + conf = request.session['handle'] + obj = get_object_or_404(conf.ghostbusters, pk=pk) + logstream = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.GhostbusterRequestForm(request.POST, request.FILES, + conf=conf, instance=obj) + if form.is_valid(): + obj = form.save(commit=False) + obj.vcard = glue.ghostbuster_to_vcard(obj) + obj.save() + Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.GhostbusterRequestForm(conf=conf, instance=obj) + return render(request, 'app/app_form.html', + {'form': form, 'form_title': 'Edit Ghostbuster Request'}) + + +@handle_required +def refresh(request): + """ + Query rpkid, update the db, and redirect back to the dashboard. + + """ + glue.list_received_resources(request.META['wsgi.errors'], + request.session['handle']) + return http.HttpResponseRedirect(reverse(dashboard)) + + +@handle_required +def route_view(request): + """ + Display a list of global routing table entries which match resources + listed in received certificates. + + """ + conf = request.session['handle'] + count = request.GET.get('count', 25) + page = request.GET.get('page', 1) + + paginator = Paginator(conf.routes, count) + try: + routes = paginator.page(page) + except InvalidPage: + # page was empty, or page number was invalid + routes = [] + ts = dict((attr['name'], attr['ts']) for attr in models.Timestamp.objects.values()) + return render(request, 'app/routes_view.html', + {'routes': routes, 'timestamp': ts}) + + +def route_detail(request, pk): + """Show a list of ROAs that match a given IPv4 route.""" + route = get_object_or_404(models.RouteOrigin, pk=pk) + # when running rootd, viewing the 0.0.0.0/0 route will cause a fetch of all + # roas, so we paginate here, even though in the general case the number of + # objects will be small enough to fit a single page + count = request.GET.get('count', 25) + page = request.GET.get('page', 1) + paginator = Paginator(route.roa_prefixes.all(), count) + return render(request, 'app/route_detail.html', { + 'object': route, + 'roa_prefixes': paginator.page(page), + }) + + +def route_suggest(request): + """Handles POSTs from the route view and redirects to the ROA creation + page based on selected route objects. The form should contain elements of + the form "pk-NUM" where NUM is the RouteOrigin object id. 
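+
+    For example, a checked box named "pk-42" selects RouteOrigin 42; the view
+    then redirects to the multi-ROA creation form with one roa=prefix/len,asn
+    query parameter per selected route.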
+ + """ + if request.method == 'POST': + routes = [] + for pk in request.POST.iterkeys(): + logger.debug(pk) + if pk.startswith("pk-"): + n = int(pk[3:]) + routes.append(n) + qs = RouteOrigin.objects.filter(pk__in=routes) + s = [] + for r in qs: + s.append('roa=%s/%d,%d' % (str(r.prefix_min), r.prefixlen, r.asn)) + p = '&'.join(s) + return redirect(reverse(roa_create_multi) + '?' + p) + + +@handle_required +def repository_detail(request, pk): + conf = request.session['handle'] + return render(request, + 'app/repository_detail.html', + {'object': get_object_or_404(conf.repositories, pk=pk)}) + + +@handle_required +def repository_delete(request, pk): + log = request.META['wsgi.errors'] + conf = request.session['handle'] + # Ensure the repository being deleted belongs to the current user. + obj = get_object_or_404(models.Repository, issuer=conf, pk=pk) + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + z = Zookeeper(handle=conf.handle, logstream=log) + z.delete_repository(obj.handle) + z.synchronize_ca() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.Empty() + return render(request, 'app/object_confirm_delete.html', { + 'object': obj, + 'form': form, + 'parent_template': + 'app/repository_detail.html', + }) + + +@handle_required +def repository_import(request): + """Import XML response file from repository operator.""" + return generic_import(request, + models.Repository.objects, + Zookeeper.configure_repository, + form_class=forms.ImportRepositoryForm, + post_import_redirect=reverse(dashboard)) + + +@superuser_required +def client_list(request): + """display a list of all repository client (irdb.models.Client)""" + + return render(request, 'app/client_list.html', { + 'object_list': models.Client.objects.all() + }) + + +@superuser_required +def client_detail(request, pk): + return render(request, 'app/client_detail.html', + {'object': get_object_or_404(models.Client, pk=pk)}) + + +@superuser_required +def client_delete(request, pk): + log = request.META['wsgi.errors'] + obj = get_object_or_404(models.Client, pk=pk) + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + z = Zookeeper(logstream=log) + z.delete_publication_client(obj.handle) + z.synchronize_pubd() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.Empty() + return render(request, 'app/object_confirm_delete.html', { + 'object': obj, + 'form': form, + 'parent_template': 'app/client_detail.html' + }) + + +@superuser_required +def client_import(request): + return generic_import(request, models.Client.objects, + Zookeeper.configure_publication_client, + form_class=forms.ImportClientForm, + post_import_redirect=reverse(dashboard)) + + +@superuser_required +def client_export(request, pk): + """Return the XML file resulting from a configure_publication_client + request. 
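+
+    The repository response is returned as an XML attachment whose filename
+    ends in ".repo.xml".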
+ + """ + client = get_object_or_404(models.Client, pk=pk) + z = Zookeeper() + xml = z.generate_repository_response(client) + return serve_xml(str(xml), '%s.repo' % z.handle) + + +### Routines for managing resource handles serviced by this server + +@superuser_required +def resource_holder_list(request): + """Display a list of all the RPKI handles managed by this server.""" + return render(request, 'app/resource_holder_list.html', { + 'object_list': models.Conf.objects.all() + }) + + +@superuser_required +def resource_holder_edit(request, pk): + """Display a list of all the RPKI handles managed by this server.""" + conf = get_object_or_404(models.Conf, pk=pk) + if request.method == 'POST': + form = forms.ResourceHolderForm(request.POST, request.FILES) + if form.is_valid(): + models.ConfACL.objects.filter(conf=conf).delete() + for user in form.cleaned_data.get('users'): + models.ConfACL.objects.create(user=user, conf=conf) + return redirect(resource_holder_list) + else: + users = [acl.user for acl in models.ConfACL.objects.filter(conf=conf).all()] + form = forms.ResourceHolderForm(initial={ + 'users': users + }) + return render(request, 'app/app_form.html', { + 'form_title': "Edit Resource Holder: " + conf.handle, + 'form': form, + 'cancel_url': reverse(resource_holder_list) + }) + + +@superuser_required +def resource_holder_delete(request, pk): + conf = get_object_or_404(models.Conf, pk=pk) + log = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.Empty(request.POST) + if form.is_valid(): + z = Zookeeper(handle=conf.handle, logstream=log) + z.delete_self() + z.synchronize_deleted_ca() + return redirect(resource_holder_list) + else: + form = forms.Empty() + return render(request, 'app/app_confirm_delete.html', { + 'form_title': 'Delete Resource Holder: ' + conf.handle, + 'form': form, + 'cancel_url': reverse(resource_holder_list) + }) + + +@superuser_required +def resource_holder_create(request): + log = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.ResourceHolderCreateForm(request.POST, request.FILES) + if form.is_valid(): + handle = form.cleaned_data.get('handle') + parent = form.cleaned_data.get('parent') + + zk_child = Zookeeper(handle=handle, logstream=log) + identity_xml = zk_child.initialize_resource_bpki() + if parent: + # FIXME etree_wrapper should allow us to deal with file objects + t = NamedTemporaryFile(delete=False) + t.close() + + identity_xml.save(t.name) + zk_parent = Zookeeper(handle=parent.handle, logstream=log) + parent_response, _ = zk_parent.configure_child(t.name) + parent_response.save(t.name) + zk_parent.synchronize_ca() + repo_req, _ = zk_child.configure_parent(t.name) + repo_req.save(t.name) + repo_resp, _ = zk_parent.configure_publication_client(t.name) + repo_resp.save(t.name) + zk_parent.synchronize_pubd() + zk_child.configure_repository(t.name) + os.remove(t.name) + zk_child.synchronize_ca() + return redirect(resource_holder_list) + else: + form = forms.ResourceHolderCreateForm() + return render(request, 'app/app_form.html', { + 'form': form, + 'form_title': 'Create Resource Holder', + 'cancel_url': reverse(resource_holder_list) + }) + + +### views for managing user logins to the web interface + +@superuser_required +def user_create(request): + if request.method == 'POST': + form = forms.UserCreateForm(request.POST, request.FILES) + if form.is_valid(): + username = form.cleaned_data.get('username') + pw = form.cleaned_data.get('password') + email = form.cleaned_data.get('email') + user = 
User.objects.create_user(username, email, pw) + for conf in form.cleaned_data.get('resource_holders'): + models.ConfACL.objects.create(user=user, conf=conf) + return redirect(user_list) + else: + form = forms.UserCreateForm() + + return render(request, 'app/app_form.html', { + 'form': form, + 'form_title': 'Create User', + 'cancel_url': reverse(user_list), + }) + + +@superuser_required +def user_list(request): + """Display a list of all the RPKI handles managed by this server.""" + return render(request, 'app/user_list.html', { + 'object_list': User.objects.all() + }) + + +@superuser_required +def user_delete(request, pk): + user = get_object_or_404(User, pk=pk) + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + user.delete() + return redirect(user_list) + else: + form = forms.Empty() + return render(request, 'app/app_confirm_delete.html', { + 'form_title': 'Delete User: ' + user.username, + 'form': form, + 'cancel_url': reverse(user_list) + }) + + +@superuser_required +def user_edit(request, pk): + user = get_object_or_404(User, pk=pk) + if request.method == 'POST': + form = forms.UserEditForm(request.POST) + if form.is_valid(): + pw = form.cleaned_data.get('pw') + if pw: + user.set_password(pw) + user.email = form.cleaned_data.get('email') + user.save() + models.ConfACL.objects.filter(user=user).delete() + handles = form.cleaned_data.get('resource_holders') + for conf in handles: + models.ConfACL.objects.create(user=user, conf=conf) + return redirect(user_list) + else: + form = forms.UserEditForm(initial={ + 'email': user.email, + 'resource_holders': models.Conf.objects.filter(confacl__user=user).all() + }) + return render(request, 'app/app_form.html', { + 'form': form, + 'form_title': 'Edit User: ' + user.username, + 'cancel_url': reverse(user_list) + }) + + +class AlertListView(ListView): + # this nonsense is required to decorate CBVs + @method_decorator(handle_required) + def dispatch(self, request, *args, **kwargs): + return super(AlertListView, self).dispatch(request, *args, **kwargs) + + def get_queryset(self, **kwargs): + conf = self.request.session['handle'] + return conf.alerts.all() + + +class AlertDetailView(DetailView): + # this nonsense is required to decorate CBVs + @method_decorator(handle_required) + def dispatch(self, request, *args, **kwargs): + return super(AlertDetailView, self).dispatch(request, *args, **kwargs) + + def get_queryset(self, **kwargs): + conf = self.request.session['handle'] + return conf.alerts.all() + + def get_object(self, **kwargs): + obj = super(AlertDetailView, self).get_object(**kwargs) + # mark alert as read by the user + obj.seen = True + obj.save() + return obj + + +class AlertDeleteView(DeleteView): + success_url = reverse_lazy('alert-list') + + # this nonsense is required to decorate CBVs + @method_decorator(handle_required) + def dispatch(self, request, *args, **kwargs): + return super(AlertDeleteView, self).dispatch(request, *args, **kwargs) + + def get_queryset(self, **kwargs): + conf = self.request.session['handle'] + return conf.alerts.all() + + +@handle_required +def alert_clear_all(request): + """Clear all alerts associated with the current resource holder.""" + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + # delete alerts + request.session['handle'].clear_alerts() + return redirect('alert-list') + else: + form = forms.Empty() + return render(request, 'app/alert_confirm_clear.html', {'form': form}) diff --git 
a/rpki/gui/cacheview/__init__.py b/rpki/gui/cacheview/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/rpki/gui/cacheview/__init__.py diff --git a/rpki/gui/cacheview/forms.py b/rpki/gui/cacheview/forms.py new file mode 100644 index 00000000..28b8ff24 --- /dev/null +++ b/rpki/gui/cacheview/forms.py @@ -0,0 +1,51 @@ +# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django import forms + +from rpki.gui.cacheview.misc import parse_ipaddr +from rpki.exceptions import BadIPResource +from rpki.resource_set import resource_range_as + + +class SearchForm(forms.Form): + asn = forms.CharField(required=False, help_text='AS or range', label='AS') + addr = forms.CharField(required=False, max_length=40, help_text='range/CIDR', label='IP Address') + + def clean(self): + asn = self.cleaned_data.get('asn') + addr = self.cleaned_data.get('addr') + if (asn and addr) or ((not asn) and (not addr)): + raise forms.ValidationError, 'Please specify either an AS or IP range, not both' + + if asn: + try: + resource_range_as.parse_str(asn) + except ValueError: + raise forms.ValidationError, 'invalid AS range' + + if addr: + #try: + parse_ipaddr(addr) + #except BadIPResource: + # raise forms.ValidationError, 'invalid IP address range/prefix' + + return self.cleaned_data + + +class SearchForm2(forms.Form): + resource = forms.CharField(required=True) diff --git a/rpki/gui/cacheview/misc.py b/rpki/gui/cacheview/misc.py new file mode 100644 index 00000000..9a69645c --- /dev/null +++ b/rpki/gui/cacheview/misc.py @@ -0,0 +1,31 @@ +# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
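+#
+# parse_ipaddr() below is used by the cacheview search form to decide whether
+# a query string names an IPv4 or an IPv6 range.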
+
+from rpki.resource_set import resource_range_ipv4, resource_range_ipv6
+from rpki.exceptions import BadIPResource
+
+def parse_ipaddr(s):
+    # resource_set functions only accept str
+    if isinstance(s, unicode):
+        s = s.encode()
+    s = s.strip()
+    # try parsing as IPv4 first, fall back to IPv6 on failure
+    try:
+        r = resource_range_ipv4.parse_str(s)
+        return 4, r
+    except BadIPResource:
+        r = resource_range_ipv6.parse_str(s)
+        return 6, r
+
+# vim:sw=4 ts=8 expandtab diff --git a/rpki/gui/cacheview/models.py b/rpki/gui/cacheview/models.py new file mode 100644 index 00000000..c3ee8421 --- /dev/null +++ b/rpki/gui/cacheview/models.py @@ -0,0 +1,237 @@ +# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions
+# Copyright (C) 2012 SPARTA, Inc. a Parsons Company
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+__version__ = '$Id$'
+
+from datetime import datetime
+import time
+
+from django.db import models
+from django.core.urlresolvers import reverse
+
+import rpki.resource_set
+import rpki.gui.models
+
+
+class TelephoneField(models.CharField):
+    def __init__(self, *args, **kwargs):
+        kwargs['max_length'] = 255
+        models.CharField.__init__(self, *args, **kwargs)
+
+
+class AddressRange(rpki.gui.models.PrefixV4):
+    @models.permalink
+    def get_absolute_url(self):
+        return ('rpki.gui.cacheview.views.addressrange_detail', [str(self.pk)])
+
+
+class AddressRangeV6(rpki.gui.models.PrefixV6):
+    @models.permalink
+    def get_absolute_url(self):
+        return ('rpki.gui.cacheview.views.addressrange_detail_v6',
+                [str(self.pk)])
+
+
+class ASRange(rpki.gui.models.ASN):
+    @models.permalink
+    def get_absolute_url(self):
+        return ('rpki.gui.cacheview.views.asrange_detail', [str(self.pk)])
+
+kinds = list(enumerate(('good', 'warn', 'bad')))
+kinds_dict = dict((v, k) for k, v in kinds)
+
+
+class ValidationLabel(models.Model):
+    """
+    Represents a specific error condition defined in the rcynic XML
+    output file.
+    """
+    label = models.CharField(max_length=79, db_index=True, unique=True)
+    status = models.CharField(max_length=255)
+    kind = models.PositiveSmallIntegerField(choices=kinds)
+
+    def __unicode__(self):
+        return self.label
+
+
+class RepositoryObject(models.Model):
+    """
+    Represents a globally unique RPKI repository object, specified by its URI.
+    """
+    uri = models.URLField(unique=True, db_index=True)
+
+generations = list(enumerate(('current', 'backup')))
+generations_dict = dict((val, key) for (key, val) in generations)
+
+
+class ValidationStatus(models.Model):
+    timestamp = models.DateTimeField()
+    generation = models.PositiveSmallIntegerField(choices=generations, null=True)
+    status = models.ForeignKey(ValidationLabel)
+    repo = models.ForeignKey(RepositoryObject, related_name='statuses')
+
+
+class SignedObject(models.Model):
+    """
+    Abstract class to hold common metadata for all signed objects.
+ The signing certificate is ommitted here in order to give a proper + value for the 'related_name' attribute. + """ + repo = models.ForeignKey(RepositoryObject, related_name='cert', unique=True) + + # on-disk file modification time + mtime = models.PositiveIntegerField(default=0) + + # SubjectName + name = models.CharField(max_length=255) + + # value from the SKI extension + keyid = models.CharField(max_length=60, db_index=True) + + # validity period from EE cert which signed object + not_before = models.DateTimeField() + not_after = models.DateTimeField() + + def mtime_as_datetime(self): + """ + convert the local timestamp to UTC and convert to a datetime object + """ + return datetime.utcfromtimestamp(self.mtime + time.timezone) + + def status_id(self): + """ + Returns a HTML class selector for the current object based on its validation status. + The selector is chosen based on the current generation only. If there is any bad status, + return bad, else if there are any warn status, return warn, else return good. + """ + for x in reversed(kinds): + if self.repo.statuses.filter(generation=generations_dict['current'], status__kind=x[0]): + return x[1] + return None # should not happen + + def __unicode__(self): + return u'%s' % self.name + + +class Cert(SignedObject): + """ + Object representing a resource certificate. + """ + addresses = models.ManyToManyField(AddressRange, related_name='certs') + addresses_v6 = models.ManyToManyField(AddressRangeV6, related_name='certs') + asns = models.ManyToManyField(ASRange, related_name='certs') + issuer = models.ForeignKey('self', related_name='children', null=True) + sia = models.CharField(max_length=255) + + def get_absolute_url(self): + return reverse('cert-detail', args=[str(self.pk)]) + + def get_cert_chain(self): + """Return a list containing the complete certificate chain for this + certificate.""" + cert = self + x = [cert] + while cert != cert.issuer: + cert = cert.issuer + x.append(cert) + x.reverse() + return x + cert_chain = property(get_cert_chain) + + +class ROAPrefix(models.Model): + "Abstract base class for ROA mixin." + + max_length = models.PositiveSmallIntegerField() + + class Meta: + abstract = True + + def as_roa_prefix(self): + "Return value as a rpki.resource_set.roa_prefix_ip object." + rng = self.as_resource_range() + return self.roa_cls(rng.min, rng.prefixlen(), self.max_length) + + def __unicode__(self): + p = self.as_resource_range() + if p.prefixlen() == self.max_length: + return str(p) + return '%s-%s' % (str(p), self.max_length) + + +# ROAPrefix is declared first, so subclass picks up __unicode__ from it. +class ROAPrefixV4(ROAPrefix, rpki.gui.models.PrefixV4): + "One v4 prefix in a ROA." + + roa_cls = rpki.resource_set.roa_prefix_ipv4 + + @property + def routes(self): + """return all routes covered by this roa prefix""" + return RouteOrigin.objects.filter(prefix_min__gte=self.prefix_min, + prefix_max__lte=self.prefix_max) + + class Meta: + ordering = ('prefix_min',) + + +# ROAPrefix is declared first, so subclass picks up __unicode__ from it. +class ROAPrefixV6(ROAPrefix, rpki.gui.models.PrefixV6): + "One v6 prefix in a ROA." 
+ + roa_cls = rpki.resource_set.roa_prefix_ipv6 + + class Meta: + ordering = ('prefix_min',) + + +class ROA(SignedObject): + asid = models.PositiveIntegerField() + prefixes = models.ManyToManyField(ROAPrefixV4, related_name='roas') + prefixes_v6 = models.ManyToManyField(ROAPrefixV6, related_name='roas') + issuer = models.ForeignKey('Cert', related_name='roas') + + def get_absolute_url(self): + return reverse('roa-detail', args=[str(self.pk)]) + + class Meta: + ordering = ('asid',) + + def __unicode__(self): + return u'ROA for AS%d' % self.asid + + +class Ghostbuster(SignedObject): + full_name = models.CharField(max_length=40) + email_address = models.EmailField(blank=True, null=True) + organization = models.CharField(blank=True, null=True, max_length=255) + telephone = TelephoneField(blank=True, null=True) + issuer = models.ForeignKey('Cert', related_name='ghostbusters') + + def get_absolute_url(self): + # note that ghostbuster-detail is different from gbr-detail! sigh + return reverse('ghostbuster-detail', args=[str(self.pk)]) + + def __unicode__(self): + if self.full_name: + return self.full_name + if self.organization: + return self.organization + if self.email_address: + return self.email_address + return self.telephone + + +from rpki.gui.routeview.models import RouteOrigin diff --git a/rpki/gui/cacheview/templates/cacheview/addressrange_detail.html b/rpki/gui/cacheview/templates/cacheview/addressrange_detail.html new file mode 100644 index 00000000..76edc1ba --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/addressrange_detail.html @@ -0,0 +1,18 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block content %} +<h1>{% block title %}IP Range Detail{% endblock %}</h1> + +<p> +IP Range: {{ object }} +</p> + +<p>Covered by the following resource certs:</p> + +<ul> +{% for cert in object.certs.all %} +<li><a href="{{ cert.get_absolute_url }}">{{ cert }}</a></li> +{% endfor %} +</ul> + +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/cacheview_base.html b/rpki/gui/cacheview/templates/cacheview/cacheview_base.html new file mode 100644 index 00000000..ec71d740 --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/cacheview_base.html @@ -0,0 +1,10 @@ +{% extends "base.html" %} +{% load url from future %} + +{% block sidebar %} +<form method='post' action='{% url 'res-search' %}'> + {% csrf_token %} + <input type='text' id='id_resource' name='resource' placeholder='prefix or AS'> + <button type='submit'>Search</button> +</form> +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/cert_detail.html b/rpki/gui/cacheview/templates/cacheview/cert_detail.html new file mode 100644 index 00000000..256e7780 --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/cert_detail.html @@ -0,0 +1,105 @@ +{% extends "cacheview/signedobject_detail.html" %} + +{% block title %} +Resource Certificate Detail +{% endblock %} + +{% block detail %} + +<h2>RFC3779 Resources</h2> + +<table class='table table-striped'> + <thead> + <tr><th>AS Ranges</th><th>IP Ranges</th></tr> + </thead> + <tbody> + <tr> + <td style='text-align:left;vertical-align:top'> + <ul class='compact'> + {% for asn in object.asns.all %} + <li><a href="{{ asn.get_absolute_url }}">{{ asn }}</a></li> + {% endfor %} + </ul> + </td> + <td style='text-align:left;vertical-align:top'> + <ul class='compact'> + {% for rng in object.addresses.all %} + <li><a href="{{ rng.get_absolute_url }}">{{ rng }}</a></li> + {% endfor %} + </ul> + </td> + </tr> + </tbody> +</table> + +<div class='section'> 
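+<!-- Ghostbusters, ROAs and child certificates issued under this certificate -->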
+<h2>Issued Objects</h2> +<ul> + +{% if object.ghostbusters.all %} + <li> +<h3>Ghostbusters</h3> + +<table class='table table-striped'> + <thead> + <tr><th>Name</th><th>Expires</th></tr> + </thead> + <tbody> + +{% for g in object.ghostbusters.all %} + <tr class='{{ g.status_id }}'> + <td><a href="{{ g.get_absolute_url }}">{{ g }}</a></td> + <td>{{ g.not_after }}</td> + </tr> + </tbody> +{% endfor %} + +</table> +{% endif %} + +{% if object.roas.all %} + <li> +<h3>ROAs</h3> +<table class='table table-striped'> + <thead> + <tr><th>#</th><th>Prefix</th><th>AS</th><th>Expires</th></tr> + </thead> + <tbody> + {% for roa in object.roas.all %} + {% for pfx in roa.prefixes.all %} + <tr class='{{ roa.status_id }}'> + <td><a href="{{ roa.get_absolute_url }}">#</a></td> + <td>{{ pfx }}</td> + <td>{{ roa.asid }}</td> + <td>{{ roa.not_after }}</td> + </tr> + {% endfor %} + {% endfor %} + </tbody> +</table> +{% endif %} + +{% if object.children.all %} +<li> +<h3>Children</h3> +<table class='table table-striped'> + <thead> + <tr><th>Name</th><th>Expires</th></tr> + </thead> + <tbody> + + {% for child in object.children.all %} + <tr class='{{ child.status_id }}'> + <td><a href="{{ child.get_absolute_url }}">{{ child.name }}</a></td> + <td>{{ child.not_after }}</td> + </tr> + {% endfor %} + </tbody> +</table> +{% endif %} + +</ul> + +</div><!--issued objects--> + +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html b/rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html new file mode 100644 index 00000000..4215f757 --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html @@ -0,0 +1,13 @@ +{% extends "cacheview/signedobject_detail.html" %} + +{% block title %}Ghostbuster Detail{% endblock %} + +{% block detail %} +<p> +<table class='table'> + <tr><td>Full Name</td><td>{{ object.full_name }}</td></tr> + <tr><td>Organization</td><td>{{ object.organization }}</td></tr> + <tr><td>Email</td><td>{{ object.email_address }}</td></tr> + <tr><td>Telephone</td><td>{{ object.telephone }}</td></tr> +</table> +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/global_summary.html b/rpki/gui/cacheview/templates/cacheview/global_summary.html new file mode 100644 index 00000000..0dbd0ffc --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/global_summary.html @@ -0,0 +1,26 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block content %} +<div class='page-header'> + <h1>Browse Global RPKI</h1> +</div> + +<table class="table table-striped"> + <thead> + <tr> + <th>Name</th> + <th>Expires</th> + <th>URI</th> + </tr> + </thead> + <tbody> + {% for r in roots %} + <tr> + <td><a href="{{ r.get_absolute_url }}">{{ r.name }}</a></td> + <td>{{ r.not_after }}</td> + <td>{{ r.repo.uri }}</td> + </tr> + {% endfor %} + </tbody> +</table> +{% endblock content %} diff --git a/rpki/gui/cacheview/templates/cacheview/query_result.html b/rpki/gui/cacheview/templates/cacheview/query_result.html new file mode 100644 index 00000000..0694c531 --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/query_result.html @@ -0,0 +1,21 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block content %} + +<h1>{% block title %}Query Results{% endblock %}</h1> + +<table> + <tr><th>Prefix</th><th>AS</th><th>Valid</th><th>Until</th></tr> + {% for object in object_list %} + <tr class='{{ object.1.status.kind_as_str }}'> + <td>{{ object.0 }}</td> + <td>{{ object.1.asid }}</td> + <td><a href="{{ object.1.get_absolute_url }}">{{ object.1.ok }}</a></td> 
+ <td>{{ object.1.not_after }}</td> + </tr> + {% endfor %} +</table> + +<p><a href="{% url rpki.gui.cacheview.views.query_view %}">new query</a></p> + +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/roa_detail.html b/rpki/gui/cacheview/templates/cacheview/roa_detail.html new file mode 100644 index 00000000..39cc547b --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/roa_detail.html @@ -0,0 +1,18 @@ +{% extends "cacheview/signedobject_detail.html" %} + +{% block title %}ROA Detail{% endblock %} + +{% block detail %} +<p> +<table> + <tr><td>AS</td><td>{{ object.asid }}</td></tr> +</table> + +<h2>Prefixes</h2> + +<ul> +{% for pfx in object.prefixes.all %} +<li>{{ pfx }} +{% endfor %} +</ul> +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/search_form.html b/rpki/gui/cacheview/templates/cacheview/search_form.html new file mode 100644 index 00000000..1141615d --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/search_form.html @@ -0,0 +1,17 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block title %} +{{ search_type }} Search +{% endblock %} + +{% block content %} + +<h1>{{search_type}} Search</h1> + +<form method='post' action='{{ request.url }}'> + {% csrf_token %} + {{ form.as_p }} + <input type='submit' name='Search'> +</form> + +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/search_result.html b/rpki/gui/cacheview/templates/cacheview/search_result.html new file mode 100644 index 00000000..7cbf852e --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/search_result.html @@ -0,0 +1,42 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block content %} + +<div class='page-header'> + <h1>Search Results <small>{{ resource }}</small></h1> +</div> + +<h2>Matching Resource Certificates</h2> +{% if certs %} +<ul> +{% for cert in certs %} +<li><a href="{{ cert.get_absolute_url }}">{{ cert }}</a> +{% endfor %} +</ul> +{% else %} +<p>none</p> +{% endif %} + +<h2>Matching ROAs</h2> +{% if roas %} +<table class='table table-striped'> + <thead> + <tr> + <th>#</th><th>Prefix</th><th>AS</th> + </tr> + </thead> + <tbody> +{% for roa in roas %} +<tr> + <td><a href="{{ roa.get_absolute_url }}">#</a></td> + <td>{{ roa.prefixes.all.0 }}</td> + <td>{{ roa.asid }}</td> +</tr> +{% endfor %} +</tbody> +</table> +{% else %} +<p>none</p> +{% endif %} + +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/signedobject_detail.html b/rpki/gui/cacheview/templates/cacheview/signedobject_detail.html new file mode 100644 index 00000000..22ae3d27 --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/signedobject_detail.html @@ -0,0 +1,58 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block content %} +<div class='page-header'> +<h1>{% block title %}Signed Object Detail{% endblock %}</h1> +</div> + +<h2>Cert Info</h2> +<table class='table table-striped'> + <tr><td>Subject Name</td><td>{{ object.name }}</td></tr> + <tr><td>SKI</td><td>{{ object.keyid }}</td></tr> + {% if object.sia %} + <tr><td>SIA</td><td>{{ object.sia }}</td></tr> + {% endif %} + <tr><td>Not Before</td><td>{{ object.not_before }}</td></tr> + <tr><td>Not After</td><td>{{ object.not_after }}</td></tr> +</table> + +<h2>Metadata</h2> + +<table class='table table-striped'> + <tr><td>URI</td><td>{{ object.repo.uri }}</td></tr> + <tr><td>Last Modified</td><td>{{ object.mtime_as_datetime|date:"DATETIME_FORMAT" }}</td></tr> +</table> + +<h2>Validation Status</h2> +<table class='table table-striped'> + <thead> + 
<tr><th>Timestamp</th><th>Generation</th><th>Status</th></tr> + </thead> + <tbody> + {% for status in object.repo.statuses.all %} + <tr class="{{ status.status.get_kind_display }}"><td>{{ status.timestamp }}</td><td>{{ status.get_generation_display }}</td><td>{{ status.status.status }}</td></tr> + {% endfor %} + </tbody> +</table> + +<h2>X.509 Certificate Chain</h2> + +<table class='table table-striped'> + <thead> + <tr><th>Depth</th><th>Name</th></tr> + </thead> + <tbody> + +{% for cert in chain %} +<tr class='{{ cert.1.status_id }}'> + <td>{{ cert.0 }}</td> + <td><a href="{{ cert.1.get_absolute_url }}">{{ cert.1.name }}</a></td> +</tr> +{% endfor %} +</tbody> + +</table> + +{% block detail %}{% endblock %} + +{% endblock %} diff --git a/rpki/gui/cacheview/tests.py b/rpki/gui/cacheview/tests.py new file mode 100644 index 00000000..2247054b --- /dev/null +++ b/rpki/gui/cacheview/tests.py @@ -0,0 +1,23 @@ +""" +This file demonstrates two different styles of tests (one doctest and one +unittest). These will both pass when you run "manage.py test". + +Replace these with more appropriate tests for your application. +""" + +from django.test import TestCase + +class SimpleTest(TestCase): + def test_basic_addition(self): + """ + Tests that 1 + 1 always equals 2. + """ + self.failUnlessEqual(1 + 1, 2) + +__test__ = {"doctest": """ +Another way to test that 1 + 1 is equal to 2. + +>>> 1 + 1 == 2 +True +"""} + diff --git a/rpki/gui/cacheview/urls.py b/rpki/gui/cacheview/urls.py new file mode 100644 index 00000000..cc03a587 --- /dev/null +++ b/rpki/gui/cacheview/urls.py @@ -0,0 +1,32 @@ +# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django.conf.urls import patterns, url +from rpki.gui.cacheview.views import (CertDetailView, RoaDetailView, + GhostbusterDetailView) + +urlpatterns = patterns('', + url(r'^search$', 'rpki.gui.cacheview.views.search_view', + name='res-search'), + url(r'^cert/(?P<pk>[^/]+)$', CertDetailView.as_view(), name='cert-detail'), + url(r'^gbr/(?P<pk>[^/]+)$', GhostbusterDetailView.as_view(), + name='ghostbuster-detail'), + url(r'^roa/(?P<pk>[^/]+)$', RoaDetailView.as_view(), name='roa-detail'), + (r'^$', 'rpki.gui.cacheview.views.global_summary'), +) + +# vim:sw=4 ts=8 expandtab diff --git a/rpki/gui/cacheview/util.py b/rpki/gui/cacheview/util.py new file mode 100644 index 00000000..0d3d7ae3 --- /dev/null +++ b/rpki/gui/cacheview/util.py @@ -0,0 +1,432 @@ +# Copyright (C) 2011 SPARTA, Inc. dba Cobham +# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' +__all__ = ('import_rcynic_xml') + +default_logfile = '/var/rcynic/data/rcynic.xml' +default_root = '/var/rcynic/data' +object_accepted = None # set by import_rcynic_xml() + +import time +import vobject +import logging +import os +import stat +from socket import getfqdn +from cStringIO import StringIO + +from django.db import transaction +import django.db.models + +import rpki +import rpki.gui.app.timestamp +from rpki.gui.app.models import Conf, Alert +from rpki.gui.cacheview import models +from rpki.rcynic import rcynic_xml_iterator, label_iterator +from rpki.sundial import datetime +from rpki.irdb.zookeeper import Zookeeper + +logger = logging.getLogger(__name__) + + +def rcynic_cert(cert, obj): + obj.sia = cert.sia_directory_uri + + # object must be saved for the related manager methods below to work + obj.save() + + # for the root cert, we can't set inst.issuer = inst until + # after inst.save() has been called. + if obj.issuer is None: + obj.issuer = obj + obj.save() + + # resources can change when a cert is updated + obj.asns.clear() + obj.addresses.clear() + + if cert.resources.asn.inherit: + # FIXME: what happens when the parent's resources change and the child + # cert is not reissued? + obj.asns.add(*obj.issuer.asns.all()) + else: + for asr in cert.resources.asn: + logger.debug('processing %s' % asr) + + attrs = {'min': asr.min, 'max': asr.max} + q = models.ASRange.objects.filter(**attrs) + if not q: + obj.asns.create(**attrs) + else: + obj.asns.add(q[0]) + + # obj.issuer is None the first time we process the root cert in the + # hierarchy, so we need to guard against dereference + for cls, addr_obj, addrset, parentset in ( + models.AddressRange, obj.addresses, cert.resources.v4, + obj.issuer.addresses.all() if obj.issuer else [] + ), ( + models.AddressRangeV6, obj.addresses_v6, cert.resources.v6, + obj.issuer.addresses_v6.all() if obj.issuer else [] + ): + if addrset.inherit: + addr_obj.add(*parentset) + else: + for rng in addrset: + logger.debug('processing %s' % rng) + + attrs = {'prefix_min': rng.min, 'prefix_max': rng.max} + q = cls.objects.filter(**attrs) + if not q: + addr_obj.create(**attrs) + else: + addr_obj.add(q[0]) + + +def rcynic_roa(roa, obj): + obj.asid = roa.asID + # object must be saved for the related manager methods below to work + obj.save() + obj.prefixes.clear() + obj.prefixes_v6.clear() + for pfxset in roa.prefix_sets: + if pfxset.__class__.__name__ == 'roa_prefix_set_ipv6': + roa_cls = models.ROAPrefixV6 + prefix_obj = obj.prefixes_v6 + else: + roa_cls = models.ROAPrefixV4 + prefix_obj = obj.prefixes + + for pfx in pfxset: + attrs = {'prefix_min': pfx.min(), + 'prefix_max': pfx.max(), + 'max_length': pfx.max_prefixlen} + q = roa_cls.objects.filter(**attrs) + if not q: + prefix_obj.create(**attrs) + else: + prefix_obj.add(q[0]) + + +def rcynic_gbr(gbr, obj): + vcard = vobject.readOne(gbr.vcard) + obj.full_name = vcard.fn.value if hasattr(vcard, 'fn') else None + obj.email_address = vcard.email.value if hasattr(vcard, 'email') else None + 
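[Editorial sketch] rcynic_gbr() here reads the Ghostbuster record's payload as a vCard via vobject and copies the FN, EMAIL, TEL and ORG properties onto the model. A quick standalone sketch of what that parsing yields; the sample record below is made up:

    import vobject

    sample = (
        "BEGIN:VCARD\r\n"
        "VERSION:3.0\r\n"
        "FN:Example NOC\r\n"
        "ORG:Example Networks\r\n"
        "EMAIL:noc@example.net\r\n"
        "TEL:+1-555-0100\r\n"
        "END:VCARD\r\n"
    )
    vcard = vobject.readOne(sample)
    print(vcard.fn.value)        # 'Example NOC'
    print(vcard.org.value[0])    # 'Example Networks'  (ORG parses to a list)
    print(vcard.email.value)     # 'noc@example.net'
    print(vcard.tel.value)       # '+1-555-0100'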
obj.telephone = vcard.tel.value if hasattr(vcard, 'tel') else None + obj.organization = vcard.org.value[0] if hasattr(vcard, 'org') else None + obj.save() + +LABEL_CACHE = {} + +# dict keeping mapping of uri to (handle, old status, new status) for objects +# published by the local rpkid +uris = {} + +dispatch = { + 'rcynic_certificate': rcynic_cert, + 'rcynic_roa': rcynic_roa, + 'rcynic_ghostbuster': rcynic_gbr +} + +model_class = { + 'rcynic_certificate': models.Cert, + 'rcynic_roa': models.ROA, + 'rcynic_ghostbuster': models.Ghostbuster +} + + +def save_status(repo, vs): + timestamp = datetime.fromXMLtime(vs.timestamp).to_sql() + status = LABEL_CACHE[vs.status] + g = models.generations_dict[vs.generation] if vs.generation else None + repo.statuses.create(generation=g, timestamp=timestamp, status=status) + + # if this object is in our interest set, update with the current validation + # status + if repo.uri in uris: + x, y, z, q = uris[repo.uri] + valid = z or (status is object_accepted) # don't clobber previous True value + uris[repo.uri] = x, y, valid, repo + + if status is not object_accepted: + return + + cls = model_class[vs.file_class.__name__] + # find the instance of the signedobject subclass that is associated with + # this repo instance (may be empty when not accepted) + inst_qs = cls.objects.filter(repo=repo) + + logger.debug('processing %s' % vs.filename) + + if not inst_qs: + inst = cls(repo=repo) + logger.debug('object not found in db, creating new object cls=%s id=%s' % ( + cls, + id(inst) + )) + else: + inst = inst_qs[0] + + try: + # determine if the object is changed/new + mtime = os.stat(vs.filename)[stat.ST_MTIME] + except OSError as e: + logger.error('unable to stat %s: %s %s' % ( + vs.filename, type(e), e)) + # treat as if missing from rcynic.xml + # use inst_qs rather than deleting inst so that we don't raise an + # exception for newly created objects (inst_qs will be empty) + inst_qs.delete() + return + + if mtime != inst.mtime: + inst.mtime = mtime + try: + obj = vs.obj # causes object to be lazily loaded + except Exception, e: + logger.warning('Caught %s while processing %s: %s' % ( + type(e), vs.filename, e)) + return + + inst.not_before = obj.notBefore.to_sql() + inst.not_after = obj.notAfter.to_sql() + inst.name = obj.subject + inst.keyid = obj.ski + + # look up signing cert + if obj.issuer == obj.subject: + # self-signed cert (TA) + assert(isinstance(inst, models.Cert)) + inst.issuer = None + else: + # if an object has moved in the repository, the entry for + # the old location will still be in the database, but + # without any object_accepted in its validtion status + qs = models.Cert.objects.filter( + keyid=obj.aki, + name=obj.issuer, + repo__statuses__status=object_accepted + ) + ncerts = len(qs) + if ncerts == 0: + logger.warning('unable to find signing cert with ski=%s (%s)' % (obj.aki, obj.issuer)) + return + else: + if ncerts > 1: + # multiple matching certs, all of which are valid + logger.warning('Found multiple certs matching ski=%s sn=%s' % (obj.aki, obj.issuer)) + for c in qs: + logger.warning(c.repo.uri) + # just use the first match + inst.issuer = qs[0] + + try: + # do object-specific tasks + dispatch[vs.file_class.__name__](obj, inst) + except: + logger.error('caught exception while processing rcynic_object:\n' + 'vs=' + repr(vs) + '\nobj=' + repr(obj)) + # .show() writes to stdout + obj.show() + raise + + logger.debug('object saved id=%s' % id(inst)) + else: + logger.debug('object is unchanged') + + +@transaction.commit_on_success +def 
process_cache(root, xml_file): + + last_uri = None + repo = None + + logger.info('clearing validation statuses') + models.ValidationStatus.objects.all().delete() + + logger.info('updating validation status') + for vs in rcynic_xml_iterator(root, xml_file): + if vs.uri != last_uri: + repo, created = models.RepositoryObject.objects.get_or_create(uri=vs.uri) + last_uri = vs.uri + save_status(repo, vs) + + # garbage collection + # remove all objects which have no ValidationStatus references, which + # means they did not appear in the last XML output + logger.info('performing garbage collection') + + # Delete all objects that have zero validation status elements. + models.RepositoryObject.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0).delete() + + # Delete all SignedObject instances that were not accepted. There may + # exist rows for objects that were previously accepted. + # See https://trac.rpki.net/ticket/588#comment:30 + # + # We have to do this here rather than in save_status() because the + # <validation_status/> elements are not guaranteed to be consecutive for a + # given URI. see https://trac.rpki.net/ticket/625#comment:5 + models.SignedObject.objects.exclude(repo__statuses__status=object_accepted).delete() + + # ROAPrefixV* objects are M2M so they are not automatically deleted when + # their ROA object disappears + models.ROAPrefixV4.objects.annotate(num_roas=django.db.models.Count('roas')).filter(num_roas=0).delete() + models.ROAPrefixV6.objects.annotate(num_roas=django.db.models.Count('roas')).filter(num_roas=0).delete() + logger.info('done with garbage collection') + + +@transaction.commit_on_success +def process_labels(xml_file): + logger.info('updating labels...') + + for label, kind, desc in label_iterator(xml_file): + logger.debug('label=%s kind=%s desc=%s' % (label, kind, desc)) + if kind: + q = models.ValidationLabel.objects.filter(label=label) + if not q: + obj = models.ValidationLabel(label=label) + else: + obj = q[0] + + obj.kind = models.kinds_dict[kind] + obj.status = desc + obj.save() + + LABEL_CACHE[label] = obj + + +def fetch_published_objects(): + """Query rpkid for all objects published by local users, and look up the + current validation status of each object. The validation status is used + later to send alerts for objects which have transitioned to invalid. + + """ + logger.info('querying for published objects') + + handles = [conf.handle for conf in Conf.objects.all()] + req = [rpki.left_right.list_published_objects_elt.make_pdu(action='list', self_handle=h, tag=h) for h in handles] + z = Zookeeper() + pdus = z.call_rpkid(*req) + for pdu in pdus: + if isinstance(pdu, rpki.left_right.list_published_objects_elt): + # Look up the object in the rcynic cache + qs = models.RepositoryObject.objects.filter(uri=pdu.uri) + if qs: + # get the current validity state + valid = qs[0].statuses.filter(status=object_accepted).exists() + uris[pdu.uri] = (pdu.self_handle, valid, False, None) + logger.debug('adding ' + pdu.uri) + else: + # this object is not in the cache. it was either published + # recently, or disappared previously. if it disappeared + # previously, it has already been alerted. 
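[Editorial sketch] The garbage-collection step in process_cache() above uses a single pattern throughout: annotate every row with a count of its related objects, then delete the rows whose count is zero. The same pattern in isolation; the helper name is mine, the relation names are the ones used above:

    from django.db.models import Count

    def delete_unreferenced(queryset, related_name):
        """Delete rows of `queryset` with no related objects via `related_name`."""
        return queryset.annotate(n=Count(related_name)).filter(n=0).delete()

    # roughly what the GC pass above does:
    #   delete_unreferenced(models.RepositoryObject.objects.all(), 'statuses')
    #   delete_unreferenced(models.ROAPrefixV4.objects.all(), 'roas')
    #   delete_unreferenced(models.ROAPrefixV6.objects.all(), 'roas')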
in either case, we + # omit the uri from the list since we are interested only in + # objects which were valid and are no longer valid + pass + elif isinstance(pdu, rpki.left_right.report_error_elt): + logging.error('rpkid reported an error: %s' % pdu.error_code) + + +class Handle(object): + def __init__(self): + self.invalid = [] + self.missing = [] + + def add_invalid(self, v): + self.invalid.append(v) + + def add_missing(self, v): + self.missing.append(v) + + +def notify_invalid(): + """Send email alerts to the addresses registered in ghostbuster records for + any invalid objects that were published by users of this system. + + """ + + logger.info('sending notifications for invalid objects') + + # group invalid objects by user + notify = {} + for uri, v in uris.iteritems(): + handle, old_status, new_status, obj = v + + if obj is None: + # object went missing + n = notify.get(handle, Handle()) + n.add_missing(uri) + # only select valid->invalid + elif old_status and not new_status: + n = notify.get(handle, Handle()) + n.add_invalid(obj) + + for handle, v in notify.iteritems(): + conf = Conf.objects.get(handle) + + msg = StringIO() + msg.write('This is an alert about problems with objects published by ' + 'the resource handle %s.\n\n' % handle) + + if v.invalid: + msg.write('The following objects were previously valid, but are ' + 'now invalid:\n') + + for o in v.invalid: + msg.write('\n') + msg.write(o.repo.uri) + msg.write('\n') + for s in o.statuses.all(): + msg.write('\t') + msg.write(s.status.label) + msg.write(': ') + msg.write(s.status.status) + msg.write('\n') + + if v.missing: + msg.write('The following objects were previously valid but are no ' + 'longer in the cache:\n') + + for o in v.missing: + msg.write(o) + msg.write('\n') + + msg.write("""-- +You are receiving this email because your address is published in a Ghostbuster +record, or is the default email address for this resource holder account on +%s.""" % getfqdn()) + + from_email = 'root@' + getfqdn() + subj = 'invalid RPKI object alert for resource handle %s' % conf.handle + conf.send_alert(subj, msg.getvalue(), from_email, severity=Alert.ERROR) + + +def import_rcynic_xml(root=default_root, logfile=default_logfile): + """Load the contents of rcynic.xml into the rpki.gui.cacheview database.""" + + global object_accepted + + start = time.time() + process_labels(logfile) + object_accepted = LABEL_CACHE['object_accepted'] + fetch_published_objects() + process_cache(root, logfile) + notify_invalid() + + rpki.gui.app.timestamp.update('rcynic_import') + + stop = time.time() + logger.info('elapsed time %d seconds.' % (stop - start)) diff --git a/rpki/gui/cacheview/views.py b/rpki/gui/cacheview/views.py new file mode 100644 index 00000000..94870eb2 --- /dev/null +++ b/rpki/gui/cacheview/views.py @@ -0,0 +1,172 @@ +# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
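[Editorial sketch] notify_invalid() above builds one alert per resource handle by first grouping the affected URIs by handle and then composing a message per bucket. A minimal sketch of that grouping step with toy data, using plain lists instead of the Handle helper and dict.setdefault to keep each bucket in the dict:

    # uri -> (handle, was_valid, is_valid, repository_object_or_None)
    uris = {
        'rsync://repo.example.net/a.roa': ('alice', True, False, '<a.roa>'),
        'rsync://repo.example.net/b.cer': ('alice', True, True,  '<b.cer>'),
        'rsync://repo.example.net/c.roa': ('bob',   True, False, None),
    }

    notify = {}
    for uri, (handle, was_valid, is_valid, obj) in uris.items():
        if obj is None:                      # object disappeared from the cache entirely
            notify.setdefault(handle, []).append(('missing', uri))
        elif was_valid and not is_valid:     # only report valid -> invalid transitions
            notify.setdefault(handle, []).append(('invalid', uri))

    for handle, problems in sorted(notify.items()):
        print('%s: %s' % (handle, problems))
    # alice: [('invalid', 'rsync://repo.example.net/a.roa')]
    # bob:   [('missing', 'rsync://repo.example.net/c.roa')]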
IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django.views.generic import DetailView +from django.shortcuts import render +from django.db.models import F + +from rpki.gui.cacheview import models, forms, misc +from rpki.resource_set import resource_range_as, resource_range_ip +from rpki.POW import IPAddress +from rpki.exceptions import BadIPResource + + +def cert_chain(obj): + """ + returns an iterator covering all certs from the root cert down to the EE. + """ + chain = [obj] + while obj != obj.issuer: + obj = obj.issuer + chain.append(obj) + return zip(range(len(chain)), reversed(chain)) + + +class SignedObjectDetailView(DetailView): + def get_context_data(self, **kwargs): + context = super(SignedObjectDetailView, + self).get_context_data(**kwargs) + context['chain'] = cert_chain(self.object) + return context + + +class RoaDetailView(SignedObjectDetailView): + model = models.ROA + + +class CertDetailView(SignedObjectDetailView): + model = models.Cert + + +class GhostbusterDetailView(SignedObjectDetailView): + model = models.Ghostbuster + + +def search_view(request): + certs = None + roas = None + + if request.method == 'POST': + form = forms.SearchForm2(request.POST, request.FILES) + if form.is_valid(): + resource = form.cleaned_data.get('resource') + # try to determine the type of input given + try: + r = resource_range_as.parse_str(resource) + certs = models.Cert.objects.filter(asns__min__gte=r.min, + asns__max__lte=r.max) + roas = models.ROA.objects.filter(asid__gte=r.min, + asid__lte=r.max) + except: + try: + r = resource_range_ip.parse_str(resource) + if r.version == 4: + certs = models.Cert.objects.filter( + addresses__prefix_min__lte=r.min, + addresses__prefix_max__gte=r.max) + roas = models.ROA.objects.filter( + prefixes__prefix_min__lte=r.min, + prefixes__prefix_max__gte=r.max) + else: + certs = models.Cert.objects.filter( + addresses_v6__prefix_min__lte=r.min, + addresses_v6__prefix_max__gte=r.max) + roas = models.ROA.objects.filter( + prefixes_v6__prefix_min__lte=r.min, + prefixes_v6__prefix_max__gte=r.max) + except BadIPResource: + pass + + return render(request, 'cacheview/search_result.html', + {'resource': resource, 'certs': certs, 'roas': roas}) + + +def cmp_prefix(x, y): + r = cmp(x[0].family, y[0].family) + if r == 0: + r = cmp(x[2], y[2]) # integer address + if r == 0: + r = cmp(x[0].bits, y[0].bits) + if r == 0: + r = cmp(x[0].max_length, y[0].max_length) + if r == 0: + r = cmp(x[1].asid, y[1].asid) + return r + + +#def cmp_prefix(x,y): +# for attr in ('family', 'prefix', 'bits', 'max_length'): +# r = cmp(getattr(x[0], attr), getattr(y[0], attr)) +# if r: +# return r +# return cmp(x[1].asid, y[1].asid) + + +def query_view(request): + """ + Allow the user to search for an AS or prefix, and show all published ROA + information. 
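[Editorial sketch] search_view() above decides whether the query is an AS number or a prefix by trying resource_range_as.parse_str() first and falling back to resource_range_ip.parse_str(); the "covers the query range" test is then expressed directly on the stored (prefix_min, prefix_max) columns. Roughly, for the IPv4 prefix case, assuming the same models and imports as above:

    from rpki.resource_set import resource_range_ip
    from rpki.gui.cacheview import models

    r = resource_range_ip.parse_str('192.0.2.0/24')

    # certificates whose IPv4 resources cover the whole query range
    certs = models.Cert.objects.filter(addresses__prefix_min__lte=r.min,
                                       addresses__prefix_max__gte=r.max)

    # ROAs are matched the same way through their prefix relation
    roas = models.ROA.objects.filter(prefixes__prefix_min__lte=r.min,
                                     prefixes__prefix_max__gte=r.max)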
+ """ + + if request.method == 'POST': + form = forms.SearchForm(request.POST, request.FILES) + if form.is_valid(): + certs = None + roas = None + + addr = form.cleaned_data.get('addr') + asn = form.cleaned_data.get('asn') + + if addr: + family, r = misc.parse_ipaddr(addr) + prefixes = models.ROAPrefix.objects.filter(family=family, prefix=str(r.min)) + + prefix_list = [] + for pfx in prefixes: + for roa in pfx.roas.all(): + prefix_list.append((pfx, roa)) + elif asn: + r = resource_range_as.parse_str(asn) + roas = models.ROA.objects.filter(asid__gte=r.min, asid__lte=r.max) + + # display the results sorted by prefix + prefix_list = [] + for roa in roas: + for pfx in roa.prefixes.all(): + addr = IPAddress(pfx.prefix.encode()) + prefix_list.append((pfx, roa, addr)) + prefix_list.sort(cmp=cmp_prefix) + + return render('cacheview/query_result.html', + {'object_list': prefix_list}, request) + else: + form = forms.SearchForm() + + return render('cacheview/search_form.html', { + 'form': form, 'search_type': 'ROA '}, request) + + +def global_summary(request): + """Display a table summarizing the state of the global RPKI.""" + + roots = models.Cert.objects.filter(issuer=F('pk')) # self-signed + + return render(request, 'cacheview/global_summary.html', { + 'roots': roots + }) + +# vim:sw=4 ts=8 expandtab diff --git a/rpki/gui/decorators.py b/rpki/gui/decorators.py new file mode 100644 index 00000000..69d20c46 --- /dev/null +++ b/rpki/gui/decorators.py @@ -0,0 +1,31 @@ +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django import http + + +def tls_required(f): + """Decorator which returns a 500 error if the connection is not secured + with TLS (https). + + """ + def _tls_required(request, *args, **kwargs): + if not request.is_secure(): + return http.HttpResponseServerError( + 'This resource may only be accessed securely via https', + content_type='text/plain') + return f(request, *args, **kwargs) + return _tls_required diff --git a/rpki/gui/default_settings.py b/rpki/gui/default_settings.py new file mode 100644 index 00000000..3859247c --- /dev/null +++ b/rpki/gui/default_settings.py @@ -0,0 +1,171 @@ +""" +This module contains static configuration settings for the web portal. +""" + +__version__ = '$Id$' + +import os +import random +import string +import socket + +import rpki.config +import rpki.autoconf + +# Where to put static files. +STATIC_ROOT = rpki.autoconf.datarootdir + '/rpki/media' + +# Must end with a slash! +STATIC_URL = '/media/' + +# Where to email server errors. 
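[Editorial sketch] The tls_required decorator above can wrap any view that must never be served over plain HTTP. Applying it looks like this; the view itself is only an illustration, not part of the portal:

    from django import http
    from rpki.gui.decorators import tls_required

    @tls_required
    def sensitive_view(request):
        # only reached when request.is_secure() is true; otherwise the
        # decorator has already returned a plain-text 500 response
        return http.HttpResponse('ok', content_type='text/plain')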
+ADMINS = (('Administrator', 'root@localhost'),) + +LOGGING = { + 'version': 1, + 'formatters': { + 'verbose': { + # see http://docs.python.org/2.7/library/logging.html#logging.LogRecord + 'format': '%(levelname)s %(asctime)s %(name)s %(message)s' + }, + }, + 'handlers': { + 'stderr': { + 'class': 'logging.StreamHandler', + 'level': 'DEBUG', + 'formatter': 'verbose', + }, + 'mail_admins': { + 'level': 'ERROR', + 'class': 'django.utils.log.AdminEmailHandler', + }, + }, + 'loggers': { + 'django': { + 'level': 'ERROR', + 'handlers': ['stderr', 'mail_admins'], + }, + 'rpki.gui': { + 'level': 'WARNING', + 'handlers': ['stderr'], + }, + }, +} + +# Load the SQL authentication bits from the system rpki.conf. +rpki_config = rpki.config.parser(section='web_portal') + +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.mysql', + 'NAME': rpki_config.get('sql-database'), + 'USER': rpki_config.get('sql-username'), + 'PASSWORD': rpki_config.get('sql-password'), + + # Ensure the default storage engine is InnoDB since we need + # foreign key support. The Django documentation suggests + # removing this after the syncdb is performed as an optimization, + # but there isn't an easy way to do this automatically. + + 'OPTIONS': { + 'init_command': 'SET storage_engine=INNODB', + } + } +} + + +def select_tz(): + "Find a supported timezone that looks like UTC" + for tz in ('UTC', 'GMT', 'Etc/UTC', 'Etc/GMT'): + if os.path.exists('/usr/share/zoneinfo/' + tz): + return tz + # Can't determine the proper timezone, fall back to UTC and let Django + # report the error to the user. + return 'UTC' + +# Local time zone for this installation. Choices can be found here: +# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name +# although not all choices may be available on all operating systems. +# If running in a Windows environment this must be set to the same as your +# system time zone. +TIME_ZONE = select_tz() + +def get_secret_key(): + """Retrieve the secret-key value from rpki.conf or generate a random value + if it is not present.""" + d = string.letters + string.digits + val = ''.join([random.choice(d) for _ in range(50)]) + return rpki_config.get('secret-key', val) + +# Make this unique, and don't share it with anybody. +SECRET_KEY = get_secret_key() + +# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts +# for details on why you might need this. +def get_allowed_hosts(): + allowed_hosts = set(rpki_config.multiget("allowed-hosts")) + allowed_hosts.add(socket.getfqdn()) + try: + import netifaces + for interface in netifaces.interfaces(): + addresses = netifaces.ifaddresses(interface) + for af in (netifaces.AF_INET, netifaces.AF_INET6): + if af in addresses: + for address in addresses[af]: + if "addr" in address: + allowed_hosts.add(address["addr"]) + except ImportError: + pass + return list(allowed_hosts) + +ALLOWED_HOSTS = get_allowed_hosts() + +# List of callables that know how to import templates from various sources. 
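[Editorial sketch] The database credentials, secret key and allowed-hosts list above are all read from the [web_portal] section of rpki.conf via rpki.config.parser. A matching configuration fragment might look like the following; every value is a placeholder:

    [web_portal]
    sql-database  = rpki_gui
    sql-username  = rpki_gui
    sql-password  = change-me
    # optional; if omitted, a new random SECRET_KEY is generated each time the server starts
    secret-key    = 50-random-characters-go-here
    # extra hostnames to accept; the FQDN and local interface addresses are added automatically
    allowed-hosts = rpki.example.net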
+TEMPLATE_LOADERS = ( + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', + 'django.template.loaders.eggs.Loader' +) + +MIDDLEWARE_CLASSES = ( + 'django.middleware.common.CommonMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware' +) + +ROOT_URLCONF = 'rpki.gui.urls' + +INSTALLED_APPS = ( + 'django.contrib.auth', + #'django.contrib.admin', + #'django.contrib.admindocs', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.staticfiles', + 'rpki.irdb', + 'rpki.gui.app', + 'rpki.gui.cacheview', + 'rpki.gui.routeview', + 'south', +) + +TEMPLATE_CONTEXT_PROCESSORS = ( + "django.contrib.auth.context_processors.auth", + "django.core.context_processors.debug", + "django.core.context_processors.i18n", + "django.core.context_processors.media", + "django.contrib.messages.context_processors.messages", + "django.core.context_processors.request", + "django.core.context_processors.static" +) + +# Allow local site to override any setting above -- but if there's +# anything that local sites routinely need to modify, please consider +# putting that configuration into rpki.conf and just adding code here +# to read that configuration. +try: + from local_settings import * +except: + pass diff --git a/rpki/gui/models.py b/rpki/gui/models.py new file mode 100644 index 00000000..7a684f32 --- /dev/null +++ b/rpki/gui/models.py @@ -0,0 +1,150 @@ +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Common classes for reuse in apps. +""" + +__version__ = '$Id$' + +from django.db import models + +import rpki.resource_set +import rpki.POW +from south.modelsinspector import add_introspection_rules + + +class IPv6AddressField(models.Field): + "Field large enough to hold a 128-bit unsigned integer." + + __metaclass__ = models.SubfieldBase + + def db_type(self, connection): + return 'binary(16)' + + def to_python(self, value): + if isinstance(value, rpki.POW.IPAddress): + return value + return rpki.POW.IPAddress.fromBytes(value) + + def get_db_prep_value(self, value, connection, prepared): + """ + Note that we add a custom conversion to encode long values as hex + strings in SQL statements. See settings.get_conv() for details. + + """ + return value.toBytes() + + +class IPv4AddressField(models.Field): + "Wrapper around rpki.POW.IPAddress." 
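[Editorial sketch] default_settings.py above ends by attempting to import local_settings, so a site can override any individual setting without editing the packaged module. A minimal local_settings.py might be (contents illustrative):

    # local_settings.py -- picked up by the trailing "from local_settings import *" above
    ADMINS = (('Ops team', 'ops@example.net'),)
    TIME_ZONE = 'UTC'
    ALLOWED_HOSTS = ['rpki.example.net']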
+ + __metaclass__ = models.SubfieldBase + + def db_type(self, connection): + return 'int UNSIGNED' + + def to_python(self, value): + if isinstance(value, rpki.POW.IPAddress): + return value + return rpki.POW.IPAddress(value, version=4) + + def get_db_prep_value(self, value, connection, prepared): + return long(value) + +add_introspection_rules( + [ + ([IPv4AddressField, IPv6AddressField], [], {}) + ], + ['^rpki\.gui\.models\.IPv4AddressField', + '^rpki\.gui\.models\.IPv6AddressField'] +) + + +class Prefix(models.Model): + """Common implementation for models with an IP address range. + + Expects that `range_cls` is set to the appropriate subclass of + rpki.resource_set.resource_range_ip.""" + + def as_resource_range(self): + """ + Returns the prefix as a rpki.resource_set.resource_range_ip object. + """ + return self.range_cls(self.prefix_min, self.prefix_max) + + @property + def prefixlen(self): + "Returns the prefix length for the prefix in this object." + return self.as_resource_range().prefixlen() + + def get_prefix_display(self): + "Return a string representatation of this IP prefix." + return str(self.as_resource_range()) + + def __unicode__(self): + """This method may be overridden by subclasses. The default + implementation calls get_prefix_display(). """ + return self.get_prefix_display() + + class Meta: + abstract = True + + # default sort order reflects what "sh ip bgp" outputs + ordering = ('prefix_min',) + + +class PrefixV4(Prefix): + "IPv4 Prefix." + + range_cls = rpki.resource_set.resource_range_ipv4 + + prefix_min = IPv4AddressField(db_index=True, null=False) + prefix_max = IPv4AddressField(db_index=True, null=False) + + class Meta(Prefix.Meta): + abstract = True + + +class PrefixV6(Prefix): + "IPv6 Prefix." + + range_cls = rpki.resource_set.resource_range_ipv6 + + prefix_min = IPv6AddressField(db_index=True, null=False) + prefix_max = IPv6AddressField(db_index=True, null=False) + + class Meta(Prefix.Meta): + abstract = True + + +class ASN(models.Model): + """Represents a range of ASNs. + + This model is abstract, and is intended to be reused by applications.""" + + min = models.PositiveIntegerField(null=False) + max = models.PositiveIntegerField(null=False) + + class Meta: + abstract = True + ordering = ('min', 'max') + + def as_resource_range(self): + return rpki.resource_set.resource_range_as(self.min, self.max) + + def __unicode__(self): + return u'AS%s' % self.as_resource_range() + +# vim:sw=4 ts=8 expandtab diff --git a/rpki/gui/routeview/__init__.py b/rpki/gui/routeview/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/rpki/gui/routeview/__init__.py diff --git a/rpki/gui/routeview/api.py b/rpki/gui/routeview/api.py new file mode 100644 index 00000000..cf699c9a --- /dev/null +++ b/rpki/gui/routeview/api.py @@ -0,0 +1,69 @@ +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
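[Editorial sketch] The Prefix base class above stores a CIDR block as a (prefix_min, prefix_max) pair of address columns and rebuilds the range object on demand via as_resource_range(). Roughly what that round trip looks like with the resource_set classes the models use:

    from rpki.resource_set import resource_range_ipv4

    rng = resource_range_ipv4.parse_str('192.0.2.0/24')
    # a PrefixV4 row stores rng.min and rng.max in its two IPv4AddressField columns
    prefix_min, prefix_max = rng.min, rng.max      # 192.0.2.0 and 192.0.2.255
    print(rng.prefixlen())                         # 24
    print(str(rng))                                # '192.0.2.0/24', i.e. get_prefix_display()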
IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +import json +from django import http +from rpki.gui.routeview.models import RouteOrigin, RouteOriginV6 +from rpki import resource_set +import rpki.exceptions + +def route_list(request): + """Implements the REST query against the route models to allow the client + to search for routes. + + The only search currently supported is returning all the routes covered by + the prefix given in the 'prefix__in=' query string parameter. + + By default, only returns up to 10 matching routes, but the client may + request a different limit with the 'count=' query string parameter. + + """ + hard_limit = 100 + + if request.method == 'GET' and 'prefix__in' in request.GET: + # find all routers covered by this prefix + match_prefix = request.GET.get('prefix__in') + # max number of items to return + limit = request.GET.get('count', 10) + if limit < 1 or limit > hard_limit: + return http.HttpResponseBadRequest('invalid value for count parameter') + + try: + if ':' in match_prefix: + # v6 + pfx = resource_set.resource_range_ipv6.parse_str(match_prefix) + manager = RouteOriginV6 + else: + # v4 + pfx = resource_set.resource_range_ipv4.parse_str(match_prefix) + manager = RouteOrigin + except (AssertionError, rpki.exceptions.BadIPResource), e: + return http.HttpResponseBadRequest(e) + + try: + qs = manager.objects.filter(prefix_min__gte=pfx.min, + prefix_max__lte=pfx.max)[:limit] + # FIXME - a REST API should really return the url of the resource, + # but since we are combining two separate tables, the .pk is not a + # unique identifier. + matches = [{'prefix': str(x.as_resource_range()), 'asn': x.asn} for x in qs] + except IndexError: + # no matches + matches = [] + + return http.HttpResponse(json.dumps(matches), content_type='text/javascript') + + return http.HttpResponseBadRequest() diff --git a/rpki/gui/routeview/models.py b/rpki/gui/routeview/models.py new file mode 100644 index 00000000..052860c4 --- /dev/null +++ b/rpki/gui/routeview/models.py @@ -0,0 +1,81 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django.db.models import PositiveIntegerField, permalink +import rpki.gui.models + + +class RouteOrigin(rpki.gui.models.PrefixV4): + "Represents an IPv4 BGP routing table entry." 
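[Editorial sketch] route_list() above answers GET requests whose prefix__in parameter names a covering prefix and returns at most count matches as a JSON list of {prefix, asn} objects. A client-side sketch; the URL prefix depends on rpki.gui.api.urls, which is not shown here, so the path below is only a placeholder:

    import json
    import urllib

    params = urllib.urlencode({'prefix__in': '192.0.2.0/24', 'count': 5})
    # placeholder URL -- the real mount point is defined in rpki.gui.api.urls
    reply = urllib.urlopen('https://rpki.example.net/api/route/?' + params).read()
    for route in json.loads(reply):
        print('%(prefix)s originated by AS%(asn)s' % route)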
+ + asn = PositiveIntegerField(help_text='origin AS', null=False) + + def __unicode__(self): + return u"AS%d's route origin for %s" % (self.asn, + self.get_prefix_display()) + + @property + def roas(self): + "Return a queryset of ROAs which cover this route." + return rpki.gui.cacheview.models.ROA.objects.filter( + prefixes__prefix_min__lte=self.prefix_min, + prefixes__prefix_max__gte=self.prefix_max + ) + + @property + def roa_prefixes(self): + "Return a queryset of ROA prefixes which cover this route." + return rpki.gui.cacheview.models.ROAPrefixV4.objects.filter( + prefix_min__lte=self.prefix_min, + prefix_max__gte=self.prefix_max + ) + + @property + def status(self): + "Returns the validation status of this route origin object." + roas = self.roas + # subselect exact match + if self.asn != 0 and roas.filter(asid=self.asn, prefixes__max_length__gte=self.prefixlen).exists(): + return 'valid' + elif roas.exists(): + return 'invalid' + return 'unknown' + + @permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.route_detail', [str(self.pk)]) + + class Meta: + # sort by increasing mask length (/16 before /24) + ordering = ('prefix_min', '-prefix_max') + + +class RouteOriginV6(rpki.gui.models.PrefixV6): + "Represents an IPv6 BGP routing table entry." + + asn = PositiveIntegerField(help_text='origin AS', null=False) + + def __unicode__(self): + return u"AS%d's route origin for %s" % (self.asn, + self.get_prefix_display()) + + class Meta: + ordering = ('prefix_min', '-prefix_max') + + +# this goes at the end of the file to avoid problems with circular imports +import rpki.gui.cacheview.models diff --git a/rpki/gui/routeview/util.py b/rpki/gui/routeview/util.py new file mode 100644 index 00000000..7884224c --- /dev/null +++ b/rpki/gui/routeview/util.py @@ -0,0 +1,236 @@ +# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
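[Editorial sketch] The status property above classifies a route against the ROAs that cover it: valid when some covering ROA matches the origin AS and allows the route's prefix length, invalid when covering ROAs exist but none match, unknown when nothing covers it. The same decision in plain Python, ignoring the AS0 guard the model also applies:

    def origin_validation(route_asn, route_prefixlen, covering_roas):
        """covering_roas: iterable of (asid, max_length) pairs whose prefix covers the route."""
        covering_roas = list(covering_roas)
        if any(asid == route_asn and max_length >= route_prefixlen
               for asid, max_length in covering_roas):
            return 'valid'
        return 'invalid' if covering_roas else 'unknown'

    assert origin_validation(64496, 24, [(64496, 24)]) == 'valid'
    assert origin_validation(64511, 24, [(64496, 24)]) == 'invalid'   # wrong origin AS
    assert origin_validation(64496, 28, [(64496, 24)]) == 'invalid'   # more specific than max_length
    assert origin_validation(64496, 24, []) == 'unknown'              # no covering ROA at all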
+ +__version__ = '$Id$' +__all__ = ('import_routeviews_dump') + +import itertools +import _mysql_exceptions +import os.path +import subprocess +import time +import logging +import urlparse +from urllib import urlretrieve, unquote + +from django.db import transaction, connection + +from rpki.resource_set import resource_range_ipv4, resource_range_ipv6 +from rpki.exceptions import BadIPResource +import rpki.gui.app.timestamp + +# globals +logger = logging.getLogger(__name__) + +# Eventually this can be retrived from rpki.conf +DEFAULT_URL = 'http://archive.routeviews.org/oix-route-views/oix-full-snapshot-latest.dat.bz2' + +def parse_text(f): + last_prefix = None + cursor = connection.cursor() + range_class = resource_range_ipv4 + table = 'routeview_routeorigin' + sql = "INSERT INTO %s_new SET asn=%%s, prefix_min=%%s, prefix_max=%%s" % table + + try: + logger.info('Dropping existing staging table...') + cursor.execute('DROP TABLE IF EXISTS %s_new' % table) + except _mysql_exceptions.Warning: + pass + + logger.info('Creating staging table...') + cursor.execute('CREATE TABLE %(table)s_new LIKE %(table)s' % {'table': table}) + + logger.info('Disabling autocommit...') + cursor.execute('SET autocommit=0') + + logger.info('Adding rows to table...') + for row in itertools.islice(f, 5, None): + cols = row.split() + + # index -1 is i/e/? for igp/egp + origin_as = cols[-2] + # FIXME: skip AS_SETs + if origin_as[0] == '{': + continue + + prefix = cols[1] + + # validate the prefix since the "sh ip bgp" output is sometimes + # corrupt by no space between the prefix and the next hop IP + # address. + net, bits = prefix.split('/') + if len(bits) > 2: + s = ['mask for %s looks fishy...' % prefix] + prefix = '%s/%s' % (net, bits[0:2]) + s.append('assuming it should be %s' % prefix) + logger.warning(' '.join(s)) + + # the output may contain multiple paths to the same origin. + # if this is the same prefix as the last entry, we don't need + # to validate it again. 
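[Editorial sketch] parse_text() above walks the oix snapshot line by line, taking the prefix from the second column and the origin AS from the next-to-last column (the last column is the i/e/? origin code), and skipping AS_SET origins written in braces. For a single made-up line, the extraction looks like:

    row = '*  192.0.2.0/24  198.51.100.1  0  0  64500 64496 i'
    cols = row.split()

    prefix = cols[1]        # '192.0.2.0/24'
    origin_code = cols[-1]  # 'i' (igp/egp/incomplete)
    origin_as = cols[-2]    # '64496', the origin AS at the end of the AS path

    if origin_as.startswith('{'):
        pass                # an AS_SET such as '{64496,64497}', skipped by the importer
    else:
        print('%s originated by AS%d' % (prefix, int(origin_as)))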
+ # + # prefixes are sorted, but the origin_as is not, so we keep a set to + # avoid duplicates, and insert into the db once we've seen all the + # origin_as values for a given prefix + if prefix != last_prefix: + # output routes for previous prefix + if last_prefix is not None: + try: + rng = range_class.parse_str(last_prefix) + rmin = long(rng.min) + rmax = long(rng.max) + cursor.executemany(sql, [(asn, rmin, rmax) for asn in asns]) + except BadIPResource: + logger.warning('skipping bad prefix: ' + last_prefix) + + asns = set() + last_prefix = prefix + + try: + asns.add(int(origin_as)) + except ValueError as err: + logger.warning('\n'.join( + ['unable to parse origin AS: ' + origin_as], + ['ValueError: ' + str(err)] + ['route entry was: ' + row], + )) + + logger.info('Committing...') + cursor.execute('COMMIT') + + try: + logger.info('Dropping old table...') + cursor.execute('DROP TABLE IF EXISTS %s_old' % table) + except _mysql_exceptions.Warning: + pass + + logger.info('Swapping staging table with live table...') + cursor.execute('RENAME TABLE %(table)s TO %(table)s_old, %(table)s_new TO %(table)s' % {'table': table}) + + transaction.commit_unless_managed() + + logger.info('Updating timestamp metadata...') + rpki.gui.app.timestamp.update('bgp_v4_import') + + +def parse_mrt(f): + # filter input through bgpdump + pipe = subprocess.Popen(['bgpdump', '-m', '-v', '-'], stdin=f, + stdout=subprocess.PIPE) + + last_prefix = None + last_as = None + for e in pipe.stdout.readlines(): + a = e.split('|') + prefix = a[5] + try: + origin_as = int(a[6].split()[-1]) + except ValueError: + # skip AS_SETs + continue + + if prefix != last_prefix: + last_prefix = prefix + elif last_as == origin_as: + continue + last_as = origin_as + + asns = PREFIXES.get(prefix) + if not asns: + asns = set() + PREFIXES[prefix] = asns + asns.add(origin_as) + + pipe.wait() + if pipe.returncode: + raise ProgException('bgpdump exited with code %d' % pipe.returncode) + + +class ProgException(Exception): + pass + + +class UnknownInputType(ProgException): + pass + + +class PipeFailed(ProgException): + pass + + +def import_routeviews_dump(filename=DEFAULT_URL, filetype='auto'): + """Load the oix-full-snapshot-latest.bz2 from routeview.org into the + rpki.gui.routeview database. 
+ + Arguments: + + filename [optional]: the full path to the downloaded file to parse + + filetype [optional]: 'text' or 'mrt' + + """ + start_time = time.time() + + if filename.startswith('http://'): + #get filename from the basename of the URL + u = urlparse.urlparse(filename) + bname = os.path.basename(unquote(u.path)) + tmpname = os.path.join('/tmp', bname) + + logger.info("Downloading %s to %s" % (filename, tmpname)) + if os.path.exists(tmpname): + os.remove(tmpname) + # filename is replaced with a local filename containing cached copy of + # URL + filename, headers = urlretrieve(filename, tmpname) + + if filetype == 'auto': + # try to determine input type from filename, based on the default + # filenames from archive.routeviews.org + bname = os.path.basename(filename) + if bname.startswith('oix-full-snapshot-latest'): + filetype = 'text' + elif bname.startswith('rib.'): + filetype = 'mrt' + else: + raise UnknownInputType('unable to automatically determine input file type') + logging.info('Detected import format as "%s"' % filetype) + + pipe = None + if filename.endswith('.bz2'): + bunzip = 'bunzip2' + logging.info('Decompressing input file on the fly...') + pipe = subprocess.Popen([bunzip, '--stdout', filename], + stdout=subprocess.PIPE) + input_file = pipe.stdout + else: + input_file = open(filename) + + try: + dispatch = {'text': parse_text, 'mrt': parse_mrt} + dispatch[filetype](input_file) + except KeyError: + raise UnknownInputType('"%s" is an unknown input file type' % filetype) + + if pipe: + logging.debug('Waiting for child to exit...') + pipe.wait() + if pipe.returncode: + raise PipeFailed('Child exited code %d' % pipe.returncode) + pipe = None + else: + input_file.close() + + logger.info('Elapsed time %d secs' % (time.time() - start_time)) diff --git a/rpki/gui/script_util.py b/rpki/gui/script_util.py new file mode 100644 index 00000000..c3a864fd --- /dev/null +++ b/rpki/gui/script_util.py @@ -0,0 +1,43 @@ +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +This module contains utility functions for use in standalone scripts. +""" + +from django.conf import settings + +from rpki import config +from rpki import autoconf + +__version__ = '$Id$' + + +def setup(): + """ + Configure Django enough to use the ORM. + """ + cfg = config.parser(section='web_portal') + # INSTALLED_APPS doesn't seem necessary so long as you are only accessing + # existing tables. + settings.configure( + DATABASES={ + 'default': { + 'ENGINE': 'django.db.backends.mysql', + 'NAME': cfg.get('sql-database'), + 'USER': cfg.get('sql-username'), + 'PASSWORD': cfg.get('sql-password'), + } + }, + ) diff --git a/rpki/gui/urls.py b/rpki/gui/urls.py new file mode 100644 index 00000000..955092f5 --- /dev/null +++ b/rpki/gui/urls.py @@ -0,0 +1,36 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. 
dba Cobham Analytic Solutions +# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django.conf.urls import patterns, include + +urlpatterns = patterns( + '', + + # Uncomment the admin/doc line below and add 'django.contrib.admindocs' + # to INSTALLED_APPS to enable admin documentation: + #(r'^admin/doc/', include('django.contrib.admindocs.urls')), + + # Uncomment the next line to enable the admin: + #(r'^admin/', include(admin.site.urls)), + + (r'^api/', include('rpki.gui.api.urls')), + (r'^cacheview/', include('rpki.gui.cacheview.urls')), + (r'^rpki/', include('rpki.gui.app.urls')), + + (r'^accounts/login/$', 'rpki.gui.views.login'), + (r'^accounts/logout/$', 'rpki.gui.views.logout', {'next_page': '/rpki/'}), +) diff --git a/rpki/gui/views.py b/rpki/gui/views.py new file mode 100644 index 00000000..404d6c7e --- /dev/null +++ b/rpki/gui/views.py @@ -0,0 +1,30 @@ +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +import django.contrib.auth.views +from rpki.gui.decorators import tls_required + + +@tls_required +def login(request, *args, **kwargs): + "Wrapper around django.contrib.auth.views.login to force use of TLS." + return django.contrib.auth.views.login(request, *args, **kwargs) + + +@tls_required +def logout(request, *args, **kwargs): + "Wrapper around django.contrib.auth.views.logout to force use of TLS." + return django.contrib.auth.views.logout(request, *args, **kwargs) diff --git a/rpki/http.py b/rpki/http.py new file mode 100644 index 00000000..3c541f26 --- /dev/null +++ b/rpki/http.py @@ -0,0 +1,1070 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. 
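[Editorial sketch] script_util.setup() above configures just enough of Django for a standalone script to use the ORM outside the web server. A typical command-line importer built on it might look like this; the script itself is illustrative, not part of the tree:

    #!/usr/bin/env python
    # hypothetical standalone importer: setup() must run before anything touches the ORM
    from rpki.gui.script_util import setup
    setup()

    from rpki.gui.routeview.util import import_routeviews_dump
    import_routeviews_dump()   # fetch and load the latest routeviews snapshot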
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +HTTP utilities, both client and server. +""" + +import time +import socket +import asyncore +import asynchat +import urlparse +import sys +import random +import rpki.async +import rpki.sundial +import rpki.x509 +import rpki.exceptions +import rpki.log +import rpki.POW + +## @var rpki_content_type +# HTTP content type used for all RPKI messages. +rpki_content_type = "application/x-rpki" + +## @var debug_http +# Verbose chatter about HTTP streams. +debug_http = False + +## @var want_persistent_client +# Whether we want persistent HTTP client streams, when server also supports them. +want_persistent_client = False + +## @var want_persistent_server +# Whether we want persistent HTTP server streams, when client also supports them. +want_persistent_server = False + +## @var default_client_timeout +# Default HTTP client connection timeout. +default_client_timeout = rpki.sundial.timedelta(minutes = 5) + +## @var default_server_timeout +# Default HTTP server connection timeouts. Given our druthers, we'd +# prefer that the client close the connection, as this avoids the +# problem of client starting to reuse connection just as server closes +# it, so this should be longer than the client timeout. +default_server_timeout = rpki.sundial.timedelta(minutes = 10) + +## @var default_http_version +# Preferred HTTP version. +default_http_version = (1, 0) + +## @var default_tcp_port +# Default port for clients and servers that don't specify one. +default_tcp_port = 80 + +## @var enable_ipv6_servers +# Whether to enable IPv6 listeners. Enabled by default, as it should +# be harmless. Has no effect if kernel doesn't support IPv6. +enable_ipv6_servers = True + +## @var enable_ipv6_clients +# Whether to consider IPv6 addresses when making connections. +# Disabled by default, as IPv6 connectivity is still a bad joke in +# far too much of the world. +enable_ipv6_clients = False + +## @var have_ipv6 +# Whether the current machine claims to support IPv6. Note that just +# because the kernel supports it doesn't mean that the machine has +# usable IPv6 connectivity. I don't know of a simple portable way to +# probe for connectivity at runtime (the old test of "can you ping +# SRI-NIC.ARPA?" seems a bit dated...). Don't set this, it's set +# automatically by probing using the socket() system call at runtime. +try: + # pylint: disable=W0702,W0104 + socket.socket(socket.AF_INET6).close() + socket.IPPROTO_IPV6 + socket.IPV6_V6ONLY +except: + have_ipv6 = False +else: + have_ipv6 = True + +## @var use_adns + +# Whether to use rpki.adns code. This is still experimental, so it's +# not (yet) enabled by default. +use_adns = False +try: + import rpki.adns +except ImportError: + pass + +def supported_address_families(enable_ipv6): + """ + IP address families on which servers should listen, and to consider + when selecting addresses for client connections. 
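+
+ With IPv6 enabled and available this returns (socket.AF_INET,
+ socket.AF_INET6); otherwise it returns just (socket.AF_INET,).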
+ """ + if enable_ipv6 and have_ipv6: + return (socket.AF_INET, socket.AF_INET6) + else: + return (socket.AF_INET,) + +def localhost_addrinfo(): + """ + Return pseudo-getaddrinfo results for localhost. + """ + result = [(socket.AF_INET, "127.0.0.1")] + if enable_ipv6_clients and have_ipv6: + result.append((socket.AF_INET6, "::1")) + return result + +class http_message(object): + """ + Virtual class representing of one HTTP message. + """ + + software_name = "ISC RPKI library" + + def __init__(self, version = None, body = None, headers = None): + self.version = version + self.body = body + self.headers = headers + self.normalize_headers() + + def normalize_headers(self, headers = None): + """ + Clean up (some of) the horrible messes that HTTP allows in its + headers. + """ + if headers is None: + headers = () if self.headers is None else self.headers.items() + translate_underscore = True + else: + translate_underscore = False + result = {} + for k, v in headers: + if translate_underscore: + k = k.replace("_", "-") + k = "-".join(s.capitalize() for s in k.split("-")) + v = v.strip() + if k in result: + result[k] += ", " + v + else: + result[k] = v + self.headers = result + + @classmethod + def parse_from_wire(cls, headers): + """ + Parse and normalize an incoming HTTP message. + """ + self = cls() + headers = headers.split("\r\n") + self.parse_first_line(*headers.pop(0).split(None, 2)) + for i in xrange(len(headers) - 2, -1, -1): + if headers[i + 1][0].isspace(): + headers[i] += headers[i + 1] + del headers[i + 1] + self.normalize_headers([h.split(":", 1) for h in headers]) + return self + + def format(self): + """ + Format an outgoing HTTP message. + """ + s = self.format_first_line() + if self.body is not None: + assert isinstance(self.body, str) + self.headers["Content-Length"] = len(self.body) + for kv in self.headers.iteritems(): + s += "%s: %s\r\n" % kv + s += "\r\n" + if self.body is not None: + s += self.body + return s + + def __str__(self): + return self.format() + + def parse_version(self, version): + """ + Parse HTTP version, raise an exception if we can't. + """ + if version[:5] != "HTTP/": + raise rpki.exceptions.HTTPBadVersion, "Couldn't parse version %s" % version + self.version = tuple(int(i) for i in version[5:].split(".")) + + @property + def persistent(self): + """ + Figure out whether this HTTP message encourages a persistent connection. + """ + c = self.headers.get("Connection") + if self.version == (1, 1): + return c is None or "close" not in c.lower() + elif self.version == (1, 0): + return c is not None and "keep-alive" in c.lower() + else: + return False + +class http_request(http_message): + """ + HTTP request message. + """ + + def __init__(self, cmd = None, path = None, version = default_http_version, body = None, callback = None, errback = None, **headers): + assert cmd == "POST" or body is None + http_message.__init__(self, version = version, body = body, headers = headers) + self.cmd = cmd + self.path = path + self.callback = callback + self.errback = errback + self.retried = False + + def parse_first_line(self, cmd, path, version): + """ + Parse first line of HTTP request message. + """ + self.parse_version(version) + self.cmd = cmd + self.path = path + + def format_first_line(self): + """ + Format first line of HTTP request message, and set up the + User-Agent header. 
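+
+ For example (purely illustrative values): cmd "POST", path "/left-right",
+ and the default HTTP version produce the request line
+ "POST /left-right HTTP/1.0".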
+ """ + self.headers.setdefault("User-Agent", self.software_name) + return "%s %s HTTP/%d.%d\r\n" % (self.cmd, self.path, self.version[0], self.version[1]) + + def __repr__(self): + return rpki.log.log_repr(self, self.cmd, self.path) + +class http_response(http_message): + """ + HTTP response message. + """ + + def __init__(self, code = None, reason = None, version = default_http_version, body = None, **headers): + http_message.__init__(self, version = version, body = body, headers = headers) + self.code = code + self.reason = reason + + def parse_first_line(self, version, code, reason): + """ + Parse first line of HTTP response message. + """ + self.parse_version(version) + self.code = int(code) + self.reason = reason + + def format_first_line(self): + """ + Format first line of HTTP response message, and set up Date and + Server headers. + """ + self.headers.setdefault("Date", time.strftime("%a, %d %b %Y %T GMT")) + self.headers.setdefault("Server", self.software_name) + return "HTTP/%d.%d %s %s\r\n" % (self.version[0], self.version[1], self.code, self.reason) + + def __repr__(self): + return rpki.log.log_repr(self, self.code, self.reason) + +def log_method(self, msg, logger = rpki.log.debug): + """ + Logging method used in several different classes. + """ + assert isinstance(logger, rpki.log.logger) + if debug_http or logger is not rpki.log.debug: + logger("%r: %s" % (self, msg)) + +def addr_to_string(addr): + """ + Convert socket addr tuple to printable string. Assumes 2-element + tuple is IPv4, 4-element tuple is IPv6, throws TypeError for + anything else. + """ + + if len(addr) == 2: + return "%s:%d" % (addr[0], addr[1]) + if len(addr) == 4: + return "%s.%d" % (addr[0], addr[1]) + raise TypeError + +class http_stream(asynchat.async_chat): + """ + Virtual class representing an HTTP message stream. + """ + + log = log_method + + def __repr__(self): + status = ["connected"] if self.connected else [] + try: + status.append(addr_to_string(self.addr)) + except TypeError: + pass + return rpki.log.log_repr(self, *status) + + def __init__(self, sock = None): + asynchat.async_chat.__init__(self, sock) + self.buffer = [] + self.timer = rpki.async.timer(self.handle_timeout) + self.restart() + + def restart(self): + """ + (Re)start HTTP message parser, reset timer. + """ + assert not self.buffer + self.chunk_handler = None + self.set_terminator("\r\n\r\n") + self.update_timeout() + + def update_timeout(self): + """ + Put this stream's timer in known good state: set it to the + stream's timeout value if we're doing timeouts, otherwise clear + it. + """ + if self.timeout is not None: + self.log("Setting timeout %s" % self.timeout) + self.timer.set(self.timeout) + else: + self.log("Clearing timeout") + self.timer.cancel() + + def collect_incoming_data(self, data): + """ + Buffer incoming data from asynchat. + """ + self.buffer.append(data) + self.update_timeout() + + def get_buffer(self): + """ + Consume data buffered from asynchat. + """ + val = "".join(self.buffer) + self.buffer = [] + return val + + def found_terminator(self): + """ + Asynchat reported that it found whatever terminator we set, so + figure out what to do next. 
This can be messy, because we can be + in any of several different states: + + @li We might be handling chunked HTTP, in which case we have to + initialize the chunk decoder; + + @li We might have found the end of the message body, in which case + we can (finally) process it; or + + @li We might have just gotten to the end of the message headers, + in which case we have to parse them to figure out which of three + separate mechanisms (chunked, content-length, TCP close) is going + to tell us how to find the end of the message body. + """ + self.update_timeout() + if self.chunk_handler: + self.chunk_handler() + elif not isinstance(self.get_terminator(), str): + self.handle_body() + else: + self.msg = self.parse_type.parse_from_wire(self.get_buffer()) + if self.msg.version == (1, 1) and "chunked" in self.msg.headers.get("Transfer-Encoding", "").lower(): + self.msg.body = [] + self.chunk_handler = self.chunk_header + self.set_terminator("\r\n") + elif "Content-Length" in self.msg.headers: + self.set_terminator(int(self.msg.headers["Content-Length"])) + else: + self.handle_no_content_length() + + def chunk_header(self): + """ + Asynchat just handed us what should be the header of one chunk of + a chunked encoding stream. If this chunk has a body, set the + stream up to read it; otherwise, this is the last chunk, so start + the process of exiting the chunk decoder. + """ + n = int(self.get_buffer().partition(";")[0], 16) + self.log("Chunk length %s" % n) + if n: + self.chunk_handler = self.chunk_body + self.set_terminator(n) + else: + self.msg.body = "".join(self.msg.body) + self.chunk_handler = self.chunk_discard_trailer + + def chunk_body(self): + """ + Asynchat just handed us what should be the body of a chunk of the + body of a chunked message (sic). Save it, and prepare to move on + to the next chunk. + """ + self.log("Chunk body") + self.msg.body += self.buffer + self.buffer = [] + self.chunk_handler = self.chunk_discard_crlf + self.set_terminator("\r\n") + + def chunk_discard_crlf(self): + """ + Consume the CRLF that terminates a chunk, reinitialize chunk + decoder to be ready for the next chunk. + """ + self.log("Chunk CRLF") + s = self.get_buffer() + assert s == "", "%r: Expected chunk CRLF, got '%s'" % (self, s) + self.chunk_handler = self.chunk_header + + def chunk_discard_trailer(self): + """ + Consume chunk trailer, which should be empty, then (finally!) exit + the chunk decoder and hand complete message off to the application. + """ + self.log("Chunk trailer") + s = self.get_buffer() + assert s == "", "%r: Expected end of chunk trailers, got '%s'" % (self, s) + self.chunk_handler = None + self.handle_message() + + def handle_body(self): + """ + Hand normal (not chunked) message off to the application. + """ + self.msg.body = self.get_buffer() + self.handle_message() + + def handle_error(self): + """ + Asynchat (or asyncore, or somebody) raised an exception. See + whether it's one we should just pass along, otherwise log a stack + trace and close the stream. + """ + self.timer.cancel() + etype = sys.exc_info()[0] + if etype in (SystemExit, rpki.async.ExitNow): + raise + rpki.log.traceback() + if etype is not rpki.exceptions.HTTPClientAborted: + self.log("Closing due to error", rpki.log.warn) + self.close() + + def handle_timeout(self): + """ + Inactivity timer expired, close connection with prejudice. 
+ """ + self.log("Timeout, closing") + self.close() + + def handle_close(self): + """ + Wrapper around asynchat connection close handler, so that we can + log the event, cancel timer, and so forth. + """ + self.log("Close event in HTTP stream handler") + self.timer.cancel() + asynchat.async_chat.handle_close(self) + +class http_server(http_stream): + """ + HTTP server stream. + """ + + ## @var parse_type + # Stream parser should look for incoming HTTP request messages. + parse_type = http_request + + ## @var timeout + # Use the default server timeout value set in the module header. + timeout = default_server_timeout + + def __init__(self, sock, handlers): + self.handlers = handlers + http_stream.__init__(self, sock = sock) + self.expect_close = not want_persistent_server + self.log("Starting") + + def handle_no_content_length(self): + """ + Handle an incoming message that used neither chunking nor a + Content-Length header (that is: this message will be the last one + in this server stream). No special action required. + """ + self.handle_message() + + def find_handler(self, path): + """ + Helper method to search self.handlers. + """ + for s, h in self.handlers: + if path.startswith(s): + return h + return None + + def handle_message(self): + """ + HTTP layer managed to deliver a complete HTTP request to + us, figure out what to do with it. Check the command and + Content-Type, look for a handler, and if everything looks right, + pass the message body, path, and a reply callback to the handler. + """ + self.log("Received request %r" % self.msg) + if not self.msg.persistent: + self.expect_close = True + handler = self.find_handler(self.msg.path) + error = None + if self.msg.cmd != "POST": + error = 501, "No handler for method %s" % self.msg.cmd + elif self.msg.headers["Content-Type"] != rpki_content_type: + error = 415, "No handler for Content-Type %s" % self.headers["Content-Type"] + elif handler is None: + error = 404, "No handler for URL %s" % self.msg.path + if error is None: + try: + handler(self.msg.body, self.msg.path, self.send_reply) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + self.send_error(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) + else: + self.send_error(code = error[0], reason = error[1]) + + def send_error(self, code, reason): + """ + Send an error response to this request. + """ + self.send_message(code = code, reason = reason) + + def send_reply(self, code, body = None, reason = "OK"): + """ + Send a reply to this request. + """ + self.send_message(code = code, body = body, reason = reason) + + def send_message(self, code, reason = "OK", body = None): + """ + Queue up reply message. If both parties agree that connection is + persistant, and if no error occurred, restart this stream to + listen for next message; otherwise, queue up a close event for + this stream so it will shut down once the reply has been sent. + """ + self.log("Sending response %s %s" % (code, reason)) + if code >= 400: + self.expect_close = True + msg = http_response(code = code, reason = reason, body = body, + Content_Type = rpki_content_type, + Connection = "Close" if self.expect_close else "Keep-Alive") + self.push(msg.format()) + if self.expect_close: + self.log("Closing") + self.timer.cancel() + self.close_when_done() + else: + self.log("Listening for next message") + self.restart() + +class http_listener(asyncore.dispatcher): + """ + Listener for incoming HTTP connections. 
+ """ + + log = log_method + + def __repr__(self): + try: + status = (addr_to_string(self.addr),) + except TypeError: + status = () + return rpki.log.log_repr(self, *status) + + def __init__(self, handlers, addrinfo): + asyncore.dispatcher.__init__(self) + self.handlers = handlers + try: + af, socktype, proto, canonname, sockaddr = addrinfo # pylint: disable=W0612 + self.create_socket(af, socktype) + self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + try: + self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + except AttributeError: + pass + if have_ipv6 and af == socket.AF_INET6: + self.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + self.bind(sockaddr) + self.listen(5) + except Exception, e: + self.log("Couldn't set up HTTP listener: %s" % e, rpki.log.warn) + rpki.log.traceback() + self.close() + for h in handlers: + self.log("Handling %s" % h[0]) + + def handle_accept(self): + """ + Asyncore says we have an incoming connection, spawn an http_server + stream for it and pass along all of our handler data. + """ + try: + s, c = self.accept() + self.log("Accepting connection from %s" % addr_to_string(c)) + http_server(sock = s, handlers = self.handlers) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + self.log("Unable to accept connection: %s" % e) + self.handle_error() + + def handle_error(self): + """ + Asyncore signaled an error, pass it along or log it. + """ + if sys.exc_info()[0] in (SystemExit, rpki.async.ExitNow): + raise + self.log("Error in HTTP listener", rpki.log.warn) + rpki.log.traceback() + +class http_client(http_stream): + """ + HTTP client stream. + """ + + ## @var parse_type + # Stream parser should look for incoming HTTP response messages. + parse_type = http_response + + ## @var timeout + # Use the default client timeout value set in the module header. + timeout = default_client_timeout + + ## @var state + # Application layer connection state. + state = None + + def __init__(self, queue, hostport): + self.log("Creating new connection to %s" % addr_to_string(hostport)) + http_stream.__init__(self) + self.queue = queue + self.host = hostport[0] + self.port = hostport[1] + self.set_state("opening") + self.expect_close = not want_persistent_client + + def start(self): + """ + Create socket and request a connection. + """ + if not use_adns: + self.log("Not using ADNS") + self.gotaddrinfo([(socket.AF_INET, self.host)]) + elif self.host == "localhost": + self.log("Bypassing DNS for localhost") + self.gotaddrinfo(localhost_addrinfo()) + else: + families = supported_address_families(enable_ipv6_clients) + self.log("Starting ADNS lookup for %s in families %r" % (self.host, families)) + rpki.adns.getaddrinfo(self.gotaddrinfo, self.dns_error, self.host, families) + + def dns_error(self, e): + """ + Handle DNS lookup errors. For now, just whack the connection. + Undoubtedly we should do something better with diagnostics here. + """ + self.handle_error() + + def gotaddrinfo(self, addrinfo): + """ + Got address data from DNS, create socket and request connection. 
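+
+ One address is then picked at random from the returned list before we
+ attempt to connect.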
+ """ + try: + self.af, self.address = random.choice(addrinfo) + self.log("Connecting to AF %s host %s port %s addr %s" % (self.af, self.host, self.port, self.address)) + self.create_socket(self.af, socket.SOCK_STREAM) + self.connect((self.address, self.port)) + if self.addr is None: + self.addr = (self.host, self.port) + self.update_timeout() + except (rpki.async.ExitNow, SystemExit): + raise + except Exception: + self.handle_error() + + def handle_connect(self): + """ + Asyncore says socket has connected. + """ + self.log("Socket connected") + self.set_state("idle") + assert self.queue.client is self + self.queue.send_request() + + def set_state(self, state): + """ + Set HTTP client connection state. + """ + self.log("State transition %s => %s" % (self.state, state)) + self.state = state + + def handle_no_content_length(self): + """ + Handle response message that used neither chunking nor a + Content-Length header (that is: this message will be the last one + in this server stream). In this case we want to read until we + reach the end of the data stream. + """ + self.set_terminator(None) + + def send_request(self, msg): + """ + Queue up request message and kickstart connection. + """ + self.log("Sending request %r" % msg) + assert self.state == "idle", "%r: state should be idle, is %s" % (self, self.state) + self.set_state("request-sent") + msg.headers["Connection"] = "Close" if self.expect_close else "Keep-Alive" + self.push(msg.format()) + self.restart() + + def handle_message(self): + """ + Handle incoming HTTP response message. Make sure we're in a state + where we expect to see such a message (and allow the mysterious + empty messages that Apache sends during connection close, no idea + what that is supposed to be about). If everybody agrees that the + connection should stay open, put it into an idle state; otherwise, + arrange for the stream to shut down. + """ + + self.log("Message received, state %s" % self.state) + + if not self.msg.persistent: + self.expect_close = True + + if self.state != "request-sent": + if self.state == "closing": + assert not self.msg.body + self.log("Ignoring empty response received while closing") + return + raise rpki.exceptions.HTTPUnexpectedState, "%r received message while in unexpected state %s" % (self, self.state) + + if self.expect_close: + self.log("Closing") + self.set_state("closing") + self.close_when_done() + else: + self.log("Idling") + self.set_state("idle") + self.update_timeout() + + if self.msg.code != 200: + errmsg = "HTTP request failed" + if self.msg.code is not None: + errmsg += " with status %s" % self.msg.code + if self.msg.reason: + errmsg += ", reason %s" % self.msg.reason + if self.msg.body: + errmsg += ", response %s" % self.msg.body + raise rpki.exceptions.HTTPRequestFailed(errmsg) + self.queue.return_result(self, self.msg, detach = self.expect_close) + + def handle_close(self): + """ + Asyncore signaled connection close. If we were waiting for that + to find the end of a response message, process the resulting + message now; if we were waiting for the response to a request we + sent, signal the error. + """ + http_stream.handle_close(self) + self.log("State %s" % self.state) + if self.get_terminator() is None: + self.handle_body() + elif self.state == "request-sent": + raise rpki.exceptions.HTTPClientAborted, "HTTP request aborted by close event" + else: + self.queue.detach(self) + + def handle_timeout(self): + """ + Connection idle timer has expired. Shut down connection in any + case, noisily if we weren't idle. 
+ """ + bad = self.state not in ("idle", "closing") + if bad: + self.log("Timeout while in state %s" % self.state, rpki.log.warn) + http_stream.handle_timeout(self) + if bad: + try: + raise rpki.exceptions.HTTPTimeout + except: # pylint: disable=W0702 + self.handle_error() + else: + self.queue.detach(self) + + def handle_error(self): + """ + Asyncore says something threw an exception. Log it, then shut + down the connection and pass back the exception. + """ + eclass, edata = sys.exc_info()[0:2] + self.log("Error on HTTP client connection %s:%s %s %s" % (self.host, self.port, eclass, edata), rpki.log.warn) + http_stream.handle_error(self) + self.queue.return_result(self, edata, detach = True) + +class http_queue(object): + """ + Queue of pending HTTP requests for a single destination. This class + is very tightly coupled to http_client; http_client handles the HTTP + stream itself, this class provides a slightly higher-level API. + """ + + log = log_method + + def __repr__(self): + return rpki.log.log_repr(self, addr_to_string(self.hostport)) + + def __init__(self, hostport): + self.hostport = hostport + self.client = None + self.log("Created") + self.queue = [] + + def request(self, *requests): + """ + Append http_request object(s) to this queue. + """ + self.log("Adding requests %r" % requests) + self.queue.extend(requests) + + def restart(self): + """ + Send next request for this queue, if we can. This may involve + starting a new http_client stream, reusing an existing idle + stream, or just ignoring this request if there's an active client + stream already; in the last case, handling of the response (or + exception, or timeout) for the query currently in progress will + call this method when it's time to kick out the next query. + """ + try: + if self.client is None: + self.client = http_client(self, self.hostport) + self.log("Attached client %r" % self.client) + self.client.start() + elif self.client.state == "idle": + self.log("Sending request to existing client %r" % self.client) + self.send_request() + else: + self.log("Client %r exists in state %r" % (self.client, self.client.state)) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + self.return_result(self.client, e, detach = True) + + def send_request(self): + """ + Kick out the next query in this queue, if any. + """ + if self.queue: + self.client.send_request(self.queue[0]) + + def detach(self, client_): + """ + Detatch a client from this queue. Silently ignores attempting to + detach a client that is not attached to this queue, to simplify + handling of what otherwise would be a nasty set of race + conditions. + """ + if client_ is self.client: + self.log("Detaching client %r" % client_) + self.client = None + + def return_result(self, client, result, detach = False): # pylint: disable=W0621 + """ + Client stream has returned a result, which we need to pass along + to the original caller. Result may be either an HTTP response + message or an exception. In either case, once we're done + processing this result, kick off next message in the queue, if any. + """ + + if client is not self.client: + self.log("Wrong client trying to return result. THIS SHOULD NOT HAPPEN. Dropping result %r" % result, rpki.log.warn) + return + + if detach: + self.detach(client) + + try: + req = self.queue.pop(0) + self.log("Dequeuing request %r" % req) + except IndexError: + self.log("No caller. THIS SHOULD NOT HAPPEN. 
Dropping result %r" % result, rpki.log.warn) + return + + assert isinstance(result, http_response) or isinstance(result, Exception) + + if isinstance(result, http_response): + try: + self.log("Returning result %r to caller" % result) + req.callback(result.body) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + result = e + + if isinstance(result, Exception): + try: + self.log("Returning exception %r to caller: %s" % (result, result), rpki.log.warn) + req.errback(result) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception: + # + # If we get here, we may have lost the event chain. Not + # obvious what we can do about it at this point, but force a + # traceback so that it will be somewhat obvious that something + # really bad happened. + # + self.log("Exception in exception callback", rpki.log.warn) + rpki.log.traceback(True) + + self.log("Queue: %r" % self.queue) + + if self.queue: + self.restart() + +## @var client_queues +# Map of (host, port) tuples to http_queue objects. +client_queues = {} + +def client(msg, url, callback, errback): + """ + Open client HTTP connection, send a message, set up callbacks to + handle response. + """ + + u = urlparse.urlparse(url) + + if (u.scheme not in ("", "http") or + u.username is not None or + u.password is not None or + u.params != "" or + u.query != "" or + u.fragment != ""): + raise rpki.exceptions.BadClientURL, "Unusable URL %s" % url + + if debug_http: + rpki.log.debug("Contacting %s" % url) + + request = http_request( + cmd = "POST", + path = u.path, + body = msg, + callback = callback, + errback = errback, + Host = u.hostname, + Content_Type = rpki_content_type) + + hostport = (u.hostname or "localhost", u.port or default_tcp_port) + + if debug_http: + rpki.log.debug("Created request %r for %s" % (request, addr_to_string(hostport))) + if hostport not in client_queues: + client_queues[hostport] = http_queue(hostport) + client_queues[hostport].request(request) + + # Defer connection attempt until after we've had time to process any + # pending I/O events, in case connections have closed. + + if debug_http: + rpki.log.debug("Scheduling connection startup for %r" % request) + rpki.async.event_defer(client_queues[hostport].restart) + +def server(handlers, port, host = ""): + """ + Run an HTTP server and wait (forever) for connections. + """ + + if not isinstance(handlers, (tuple, list)): + handlers = (("/", handlers),) + + # Yes, this is sick. So is getaddrinfo() returning duplicate + # records, which RedHat has the gall to claim is a feature. + ai = [] + for af in supported_address_families(enable_ipv6_servers): + try: + if host: + h = host + elif have_ipv6 and af == socket.AF_INET6: + h = "::" + else: + h = "0.0.0.0" + for a in socket.getaddrinfo(h, port, af, socket.SOCK_STREAM): + if a not in ai: + ai.append(a) + except socket.gaierror: + pass + + for a in ai: + http_listener(addrinfo = a, handlers = handlers) + + rpki.async.event_loop() + +class caller(object): + """ + Handle client-side mechanics for protocols based on HTTP, CMS, and + rpki.xml_utils. Calling sequence is intended to nest within + rpki.async.sync_wrapper. 
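+
+ A hypothetical calling sketch (the protocol module and argument names are
+ illustrative only, not a definitive recipe):
+
+   call = caller(rpki.left_right, client_key, client_cert,
+                 server_ta, server_cert, url)
+   call(callback, errback, pdu_1, pdu_2)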
+ """ + + debug = False + + def __init__(self, proto, client_key, client_cert, server_ta, server_cert, url, debug = None): + self.proto = proto + self.client_key = client_key + self.client_cert = client_cert + self.server_ta = server_ta + self.server_cert = server_cert + self.url = url + self.cms_timestamp = None + if debug is not None: + self.debug = debug + + def __call__(self, cb, eb, *pdus): + + def done(r_der): + """ + Handle CMS-wrapped XML response message. + """ + try: + r_cms = self.proto.cms_msg(DER = r_der) + r_msg = r_cms.unwrap((self.server_ta, self.server_cert)) + self.cms_timestamp = r_cms.check_replay(self.cms_timestamp, self.url) + if self.debug: + print "<!-- Reply -->" + print r_cms.pretty_print_content() + cb(r_msg) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + eb(e) + + q_msg = self.proto.msg.query(*pdus) + q_cms = self.proto.cms_msg() + q_der = q_cms.wrap(q_msg, self.client_key, self.client_cert) + if self.debug: + print "<!-- Query -->" + print q_cms.pretty_print_content() + + client(url = self.url, msg = q_der, callback = done, errback = eb) diff --git a/rpki/ipaddrs.py b/rpki/ipaddrs.py new file mode 100644 index 00000000..c1855302 --- /dev/null +++ b/rpki/ipaddrs.py @@ -0,0 +1,137 @@ +# $Id$ +# +# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Classes to represent IP addresses. These are mostly obsolete at this +point, having been replaced by the rpki.POW.IPAddress class, but there +may still be some code using these, so keep them for now for backwards +compatability. + +Given some of the other operations we need to perform on them, it's +most convenient to represent IP addresses as Python "long" values. +The classes in this module just wrap suitable read/write syntax around +the underlying "long" type. 
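+
+An illustrative example (addresses chosen arbitrarily): parse("10.0.0.1")
+returns a v4addr whose long value is 0x0A000001 and whose str() form is
+"10.0.0.1" again, while parse("2001:db8::1") returns the corresponding
+v6addr, because the string contains a colon.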
+ +These classes also supply a "bits" attribute for use by other code +built on these classes; for the most part, IPv6 addresses really are +just IPv4 addresses with more bits, so we supply the number of bits +once, here, thus avoiding a lot of duplicate code elsewhere. +""" + +import socket, struct + +class v4addr(long): + """ + IPv4 address. + + Derived from long, but supports IPv4 print syntax. + """ + + bits = 32 + ipversion = 4 + + def __new__(cls, x): + """ + Construct a v4addr object. + """ + if isinstance(x, unicode): + x = x.encode("ascii") + if isinstance(x, str): + return cls.from_bytes(socket.inet_pton(socket.AF_INET, ".".join(str(int(i)) for i in x.split(".")))) + else: + return long.__new__(cls, x) + + def to_bytes(self): + """ + Convert a v4addr object to a raw byte string. + """ + return struct.pack("!I", long(self)) + + @classmethod + def from_bytes(cls, x): + """ + Convert from a raw byte string to a v4addr object. + """ + return cls(struct.unpack("!I", x)[0]) + + def __str__(self): + """ + Convert a v4addr object to string format. + """ + return socket.inet_ntop(socket.AF_INET, self.to_bytes()) + +class v6addr(long): + """ + IPv6 address. + + Derived from long, but supports IPv6 print syntax. + """ + + bits = 128 + ipversion = 6 + + def __new__(cls, x): + """ + Construct a v6addr object. + """ + if isinstance(x, unicode): + x = x.encode("ascii") + if isinstance(x, str): + return cls.from_bytes(socket.inet_pton(socket.AF_INET6, x)) + else: + return long.__new__(cls, x) + + def to_bytes(self): + """ + Convert a v6addr object to a raw byte string. + """ + return struct.pack("!QQ", long(self) >> 64, long(self) & 0xFFFFFFFFFFFFFFFF) + + @classmethod + def from_bytes(cls, x): + """ + Convert from a raw byte string to a v6addr object. + """ + x = struct.unpack("!QQ", x) + return cls((x[0] << 64) | x[1]) + + def __str__(self): + """ + Convert a v6addr object to string format. + """ + return socket.inet_ntop(socket.AF_INET6, self.to_bytes()) + +def parse(s): + """ + Parse a string as either an IPv4 or IPv6 address, and return object of appropriate class. + """ + if isinstance(s, unicode): + s = s.encode("ascii") + return v6addr(s) if ":" in s else v4addr(s) diff --git a/rpki/irdb/__init__.py b/rpki/irdb/__init__.py new file mode 100644 index 00000000..cc83387e --- /dev/null +++ b/rpki/irdb/__init__.py @@ -0,0 +1,26 @@ +# $Id$ +# +# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Django really wants its models packaged up in a "models" module within a +Python package, so humor it. 
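+
+Importing this package therefore pulls in the ORM models themselves along
+with the Zookeeper management class and the DBContextRouter/database
+routing helpers re-exported below.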
+""" + +# pylint: disable=W0401 + +from rpki.irdb.models import * +from rpki.irdb.zookeeper import Zookeeper +from rpki.irdb.router import DBContextRouter, database diff --git a/rpki/irdb/models.py b/rpki/irdb/models.py new file mode 100644 index 00000000..1ad9b4e3 --- /dev/null +++ b/rpki/irdb/models.py @@ -0,0 +1,646 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2011--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Internet Registry (IR) Database, Django-style. + +This is the back-end code's interface to the database. It's intended +to be usable by command line programs and other scripts, not just +Django GUI code, so be careful. +""" + +# pylint: disable=W0232 + +import django.db.models +import rpki.x509 +import rpki.sundial +import rpki.resource_set +import socket +import rpki.POW +from south.modelsinspector import add_introspection_rules + +## @var ip_version_choices +# Choice argument for fields implementing IP version numbers. + +ip_version_choices = ((4, "IPv4"), (6, "IPv6")) + +## @var ca_certificate_lifetime +# Lifetime for a BPKI CA certificate. + +ca_certificate_lifetime = rpki.sundial.timedelta(days = 3652) + +## @var crl_interval + +# Expected interval between BPKI CRL updates. This should be a little +# longer than the real regeneration cycle, so that the old CRL will +# not go stale while we're generating the new one. Eg, if we +# regenerate daily, an interval of 24 hours is too short, but 25 hours +# would be OK, as would 24 hours and 30 minutes. + +crl_interval = rpki.sundial.timedelta(hours = 25) + +## @var ee_certificate_lifetime +# Lifetime for a BPKI EE certificate. + +ee_certificate_lifetime = rpki.sundial.timedelta(days = 60) + +### + +# Field types + +class HandleField(django.db.models.CharField): + """ + A handle field type. + """ + + description = 'A "handle" in one of the RPKI protocols' + + def __init__(self, *args, **kwargs): + kwargs["max_length"] = 120 + django.db.models.CharField.__init__(self, *args, **kwargs) + +class EnumField(django.db.models.PositiveSmallIntegerField): + """ + An enumeration type that uses strings in Python and small integers + in SQL. 
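+
+ For example (hypothetical choices), EnumField(choices = ("rpkid", "pubd"))
+ stores the integers 1 and 2 in SQL while presenting the strings "rpkid"
+ and "pubd" on the Python side.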
+ """ + + description = "An enumeration type" + + __metaclass__ = django.db.models.SubfieldBase + + def __init__(self, *args, **kwargs): + if isinstance(kwargs.get("choices"), (tuple, list)) and isinstance(kwargs["choices"][0], str): + kwargs["choices"] = tuple(enumerate(kwargs["choices"], 1)) + django.db.models.PositiveSmallIntegerField.__init__(self, *args, **kwargs) + self.enum_i2s = dict(self.flatchoices) + self.enum_s2i = dict((v, k) for k, v in self.flatchoices) + + def to_python(self, value): + return self.enum_i2s.get(value, value) + + def get_prep_value(self, value): + return self.enum_s2i.get(value, value) + +class SundialField(django.db.models.DateTimeField): + """ + A field type for our customized datetime objects. + """ + __metaclass__ = django.db.models.SubfieldBase + + description = "A datetime type using our customized datetime objects" + + def to_python(self, value): + if isinstance(value, rpki.sundial.pydatetime.datetime): + return rpki.sundial.datetime.from_datetime( + django.db.models.DateTimeField.to_python(self, value)) + else: + return value + + def get_prep_value(self, value): + if isinstance(value, rpki.sundial.datetime): + return value.to_datetime() + else: + return value + + +class DERField(django.db.models.Field): + """ + Field types for DER objects. + """ + + __metaclass__ = django.db.models.SubfieldBase + + def __init__(self, *args, **kwargs): + kwargs["serialize"] = False + kwargs["blank"] = True + kwargs["default"] = None + django.db.models.Field.__init__(self, *args, **kwargs) + + def db_type(self, connection): + if connection.settings_dict['ENGINE'] == "django.db.backends.posgresql": + return "bytea" + else: + return "BLOB" + + def to_python(self, value): + assert value is None or isinstance(value, (self.rpki_type, str)) + if isinstance(value, str): + return self.rpki_type(DER = value) + else: + return value + + def get_prep_value(self, value): + assert value is None or isinstance(value, (self.rpki_type, str)) + if isinstance(value, self.rpki_type): + return value.get_DER() + else: + return value + +class CertificateField(DERField): + description = "X.509 certificate" + rpki_type = rpki.x509.X509 + +class RSAKeyField(DERField): + description = "RSA keypair" + rpki_type = rpki.x509.RSA + +class CRLField(DERField): + description = "Certificate Revocation List" + rpki_type = rpki.x509.CRL + +class PKCS10Field(DERField): + description = "PKCS #10 certificate request" + rpki_type = rpki.x509.PKCS10 + +class SignedReferralField(DERField): + description = "CMS signed object containing XML" + rpki_type = rpki.x509.SignedReferral + + +# Custom managers + +class CertificateManager(django.db.models.Manager): + + def get_or_certify(self, **kwargs): + """ + Sort of like .get_or_create(), but for models containing + certificates which need to be generated based on other fields. + + Takes keyword arguments like .get(), checks for existing object. + If none, creates a new one; if found an existing object but some + of the non-key fields don't match, updates the existing object. + Runs certification method for new or updated objects. Returns a + tuple consisting of the object and a boolean indicating whether + anything has changed. 
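+
+ A hypothetical example (the handle is illustrative):
+
+   ca, changed = ResourceHolderCA.objects.get_or_certify(handle = "Alice")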
+ """ + + changed = False + + try: + obj = self.get(**self._get_or_certify_keys(kwargs)) + + except self.model.DoesNotExist: + obj = self.model(**kwargs) + changed = True + + else: + for k in kwargs: + if getattr(obj, k) != kwargs[k]: + setattr(obj, k, kwargs[k]) + changed = True + + if changed: + obj.avow() + obj.save() + + return obj, changed + + def _get_or_certify_keys(self, kwargs): + assert len(self.model._meta.unique_together) == 1 + return dict((k, kwargs[k]) for k in self.model._meta.unique_together[0]) + +class ResourceHolderCAManager(CertificateManager): + def _get_or_certify_keys(self, kwargs): + return { "handle" : kwargs["handle"] } + +class ServerCAManager(CertificateManager): + def _get_or_certify_keys(self, kwargs): + return { "pk" : 1 } + +class ResourceHolderEEManager(CertificateManager): + def _get_or_certify_keys(self, kwargs): + return { "issuer" : kwargs["issuer"] } + +### + +class CA(django.db.models.Model): + certificate = CertificateField() + private_key = RSAKeyField() + latest_crl = CRLField() + + # Might want to bring these into line with what rpkid does. Current + # variables here were chosen to map easily to what OpenSSL command + # line tool was keeping on disk. + + next_serial = django.db.models.BigIntegerField(default = 1) + next_crl_number = django.db.models.BigIntegerField(default = 1) + last_crl_update = SundialField() + next_crl_update = SundialField() + + class Meta: + abstract = True + + def avow(self): + if self.private_key is None: + self.private_key = rpki.x509.RSA.generate(quiet = True) + now = rpki.sundial.now() + notAfter = now + ca_certificate_lifetime + self.certificate = rpki.x509.X509.bpki_self_certify( + keypair = self.private_key, + subject_name = self.subject_name, + serial = self.next_serial, + now = now, + notAfter = notAfter) + self.next_serial += 1 + self.generate_crl() + return self.certificate + + def certify(self, subject_name, subject_key, validity_interval, is_ca, pathLenConstraint = None): + now = rpki.sundial.now() + notAfter = now + validity_interval + result = self.certificate.bpki_certify( + keypair = self.private_key, + subject_name = subject_name, + subject_key = subject_key, + serial = self.next_serial, + now = now, + notAfter = notAfter, + is_ca = is_ca, + pathLenConstraint = pathLenConstraint) + self.next_serial += 1 + return result + + def revoke(self, cert): + Revocation.objects.create( + issuer = self, + revoked = rpki.sundial.now(), + serial = cert.certificate.getSerial(), + expires = cert.certificate.getNotAfter() + crl_interval) + cert.delete() + self.generate_crl() + + def generate_crl(self): + now = rpki.sundial.now() + self.revocations.filter(expires__lt = now).delete() + revoked = [(r.serial, r.revoked) for r in self.revocations.all()] + self.latest_crl = rpki.x509.CRL.generate( + keypair = self.private_key, + issuer = self.certificate, + serial = self.next_crl_number, + thisUpdate = now, + nextUpdate = now + crl_interval, + revokedCertificates = revoked) + self.last_crl_update = now + self.next_crl_update = now + crl_interval + self.next_crl_number += 1 + +class ServerCA(CA): + objects = ServerCAManager() + + def __unicode__(self): + return "" + + @property + def subject_name(self): + if self.certificate is not None: + return self.certificate.getSubject() + else: + return rpki.x509.X501DN.from_cn("%s BPKI server CA" % socket.gethostname()) + +class ResourceHolderCA(CA): + handle = HandleField(unique = True) + objects = ResourceHolderCAManager() + + def __unicode__(self): + return self.handle + + @property + 
def subject_name(self): + if self.certificate is not None: + return self.certificate.getSubject() + else: + return rpki.x509.X501DN.from_cn("%s BPKI resource CA" % self.handle) + +class Certificate(django.db.models.Model): + + certificate = CertificateField() + objects = CertificateManager() + + class Meta: + abstract = True + unique_together = ("issuer", "handle") + + def revoke(self): + self.issuer.revoke(self) + +class CrossCertification(Certificate): + handle = HandleField() + ta = CertificateField() + + class Meta: + abstract = True + + def avow(self): + self.certificate = self.issuer.certify( + subject_name = self.ta.getSubject(), + subject_key = self.ta.getPublicKey(), + validity_interval = ee_certificate_lifetime, + is_ca = True, + pathLenConstraint = 0) + + def __unicode__(self): + return self.handle + +class HostedCA(Certificate): + issuer = django.db.models.ForeignKey(ServerCA) + hosted = django.db.models.OneToOneField(ResourceHolderCA, related_name = "hosted_by") + + def avow(self): + self.certificate = self.issuer.certify( + subject_name = self.hosted.certificate.getSubject(), + subject_key = self.hosted.certificate.getPublicKey(), + validity_interval = ee_certificate_lifetime, + is_ca = True, + pathLenConstraint = 1) + + class Meta: + unique_together = ("issuer", "hosted") + + def __unicode__(self): + return self.hosted.handle + +class Revocation(django.db.models.Model): + serial = django.db.models.BigIntegerField() + revoked = SundialField() + expires = SundialField() + + class Meta: + abstract = True + unique_together = ("issuer", "serial") + +class ServerRevocation(Revocation): + issuer = django.db.models.ForeignKey(ServerCA, related_name = "revocations") + +class ResourceHolderRevocation(Revocation): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "revocations") + +class EECertificate(Certificate): + private_key = RSAKeyField() + + class Meta: + abstract = True + + def avow(self): + if self.private_key is None: + self.private_key = rpki.x509.RSA.generate(quiet = True) + self.certificate = self.issuer.certify( + subject_name = self.subject_name, + subject_key = self.private_key.get_public(), + validity_interval = ee_certificate_lifetime, + is_ca = False) + +class ServerEE(EECertificate): + issuer = django.db.models.ForeignKey(ServerCA, related_name = "ee_certificates") + purpose = EnumField(choices = ("rpkid", "pubd", "irdbd", "irbe")) + + class Meta: + unique_together = ("issuer", "purpose") + + @property + def subject_name(self): + return rpki.x509.X501DN.from_cn("%s BPKI %s EE" % (socket.gethostname(), + self.get_purpose_display())) + +class Referral(EECertificate): + issuer = django.db.models.OneToOneField(ResourceHolderCA, related_name = "referral_certificate") + objects = ResourceHolderEEManager() + + @property + def subject_name(self): + return rpki.x509.X501DN.from_cn("%s BPKI Referral EE" % self.issuer.handle) + +class Turtle(django.db.models.Model): + service_uri = django.db.models.CharField(max_length = 255) + +class Rootd(EECertificate, Turtle): + issuer = django.db.models.OneToOneField(ResourceHolderCA, related_name = "rootd") + objects = ResourceHolderEEManager() + + @property + def subject_name(self): + return rpki.x509.X501DN.from_cn("%s BPKI rootd EE" % self.issuer.handle) + +class BSC(Certificate): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "bscs") + handle = HandleField() + pkcs10 = PKCS10Field() + + def avow(self): + self.certificate = self.issuer.certify( + subject_name = self.pkcs10.getSubject(), + 
subject_key = self.pkcs10.getPublicKey(), + validity_interval = ee_certificate_lifetime, + is_ca = False) + + def __unicode__(self): + return self.handle + +class ResourceSet(django.db.models.Model): + valid_until = SundialField() + + class Meta: + abstract = True + + @property + def resource_bag(self): + raw_asn, raw_net = self._select_resource_bag() + asns = rpki.resource_set.resource_set_as.from_django( + (a.start_as, a.end_as) for a in raw_asn) + ipv4 = rpki.resource_set.resource_set_ipv4.from_django( + (a.start_ip, a.end_ip) for a in raw_net if a.version == "IPv4") + ipv6 = rpki.resource_set.resource_set_ipv6.from_django( + (a.start_ip, a.end_ip) for a in raw_net if a.version == "IPv6") + return rpki.resource_set.resource_bag( + valid_until = self.valid_until, asn = asns, v4 = ipv4, v6 = ipv6) + + # Writing of .setter method deferred until something needs it. + +class ResourceSetASN(django.db.models.Model): + start_as = django.db.models.BigIntegerField() + end_as = django.db.models.BigIntegerField() + + class Meta: + abstract = True + + def as_resource_range(self): + return rpki.resource_set.resource_range_as(self.start_as, self.end_as) + +class ResourceSetNet(django.db.models.Model): + start_ip = django.db.models.CharField(max_length = 40) + end_ip = django.db.models.CharField(max_length = 40) + version = EnumField(choices = ip_version_choices) + + class Meta: + abstract = True + + def as_resource_range(self): + return rpki.resource_set.resource_range_ip.from_strings(self.start_ip, self.end_ip) + +class Child(CrossCertification, ResourceSet): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "children") + name = django.db.models.TextField(null = True, blank = True) + + def _select_resource_bag(self): + child_asn = rpki.irdb.ChildASN.objects.raw(""" + SELECT * + FROM irdb_childasn + WHERE child_id = %s + """, [self.id]) + child_net = list(rpki.irdb.ChildNet.objects.raw(""" + SELECT * + FROM irdb_childnet + WHERE child_id = %s + """, [self.id])) + return child_asn, child_net + + class Meta: + unique_together = ("issuer", "handle") + +class ChildASN(ResourceSetASN): + child = django.db.models.ForeignKey(Child, related_name = "asns") + + class Meta: + unique_together = ("child", "start_as", "end_as") + +class ChildNet(ResourceSetNet): + child = django.db.models.ForeignKey(Child, related_name = "address_ranges") + + class Meta: + unique_together = ("child", "start_ip", "end_ip", "version") + +class Parent(CrossCertification, Turtle): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "parents") + parent_handle = HandleField() + child_handle = HandleField() + repository_type = EnumField(choices = ("none", "offer", "referral")) + referrer = HandleField(null = True, blank = True) + referral_authorization = SignedReferralField(null = True, blank = True) + + # This shouldn't be necessary + class Meta: + unique_together = ("issuer", "handle") + +class ROARequest(django.db.models.Model): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "roa_requests") + asn = django.db.models.BigIntegerField() + + @property + def roa_prefix_bag(self): + prefixes = list(rpki.irdb.ROARequestPrefix.objects.raw(""" + SELECT * + FROM irdb_roarequestprefix + WHERE roa_request_id = %s + """, [self.id])) + v4 = rpki.resource_set.roa_prefix_set_ipv4.from_django( + (p.prefix, p.prefixlen, p.max_prefixlen) for p in prefixes if p.version == "IPv4") + v6 = rpki.resource_set.roa_prefix_set_ipv6.from_django( + (p.prefix, p.prefixlen, p.max_prefixlen) for p in 
prefixes if p.version == "IPv6") + return rpki.resource_set.roa_prefix_bag(v4 = v4, v6 = v6) + + # Writing of .setter method deferred until something needs it. + +class ROARequestPrefix(django.db.models.Model): + roa_request = django.db.models.ForeignKey(ROARequest, related_name = "prefixes") + version = EnumField(choices = ip_version_choices) + prefix = django.db.models.CharField(max_length = 40) + prefixlen = django.db.models.PositiveSmallIntegerField() + max_prefixlen = django.db.models.PositiveSmallIntegerField() + + def as_roa_prefix(self): + if self.version == 'IPv4': + return rpki.resource_set.roa_prefix_ipv4(rpki.POW.IPAddress(self.prefix), self.prefixlen, self.max_prefixlen) + else: + return rpki.resource_set.roa_prefix_ipv6(rpki.POW.IPAddress(self.prefix), self.prefixlen, self.max_prefixlen) + + def as_resource_range(self): + return self.as_roa_prefix().to_resource_range() + + class Meta: + unique_together = ("roa_request", "version", "prefix", "prefixlen", "max_prefixlen") + +class GhostbusterRequest(django.db.models.Model): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "ghostbuster_requests") + parent = django.db.models.ForeignKey(Parent, related_name = "ghostbuster_requests", null = True) + vcard = django.db.models.TextField() + +class EECertificateRequest(ResourceSet): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "ee_certificate_requests") + pkcs10 = PKCS10Field() + gski = django.db.models.CharField(max_length = 27) + cn = django.db.models.CharField(max_length = 64) + sn = django.db.models.CharField(max_length = 64) + eku = django.db.models.TextField(null = True) + + def _select_resource_bag(self): + ee_asn = rpki.irdb.EECertificateRequestASN.objects.raw(""" + SELECT * + FROM irdb_eecertificaterequestasn + WHERE ee_certificate_request_id = %s + """, [self.id]) + ee_net = rpki.irdb.EECertificateRequestNet.objects.raw(""" + SELECT * + FROM irdb_eecertificaterequestnet + WHERE ee_certificate_request_id = %s + """, [self.id]) + return ee_asn, ee_net + + class Meta: + unique_together = ("issuer", "gski") + +class EECertificateRequestASN(ResourceSetASN): + ee_certificate_request = django.db.models.ForeignKey(EECertificateRequest, related_name = "asns") + + class Meta: + unique_together = ("ee_certificate_request", "start_as", "end_as") + +class EECertificateRequestNet(ResourceSetNet): + ee_certificate_request = django.db.models.ForeignKey(EECertificateRequest, related_name = "address_ranges") + + class Meta: + unique_together = ("ee_certificate_request", "start_ip", "end_ip", "version") + +class Repository(CrossCertification): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "repositories") + client_handle = HandleField() + service_uri = django.db.models.CharField(max_length = 255) + sia_base = django.db.models.TextField() + turtle = django.db.models.OneToOneField(Turtle, related_name = "repository") + + # This shouldn't be necessary + class Meta: + unique_together = ("issuer", "handle") + +class Client(CrossCertification): + issuer = django.db.models.ForeignKey(ServerCA, related_name = "clients") + sia_base = django.db.models.TextField() + parent_handle = HandleField() + + # This shouldn't be necessary + class Meta: + unique_together = ("issuer", "handle") + +# for Django South -- these are just simple subclasses +add_introspection_rules([], + ('^rpki\.irdb\.models\.CertificateField', + '^rpki\.irdb\.models\.CRLField', + '^rpki\.irdb\.models\.EnumField', + '^rpki\.irdb\.models\.HandleField', + 
'^rpki\.irdb\.models\.RSAKeyField', + '^rpki\.irdb\.models\.SignedReferralField', + '^rpki\.irdb\.models\.SundialField')) diff --git a/rpki/irdb/router.py b/rpki/irdb/router.py new file mode 100644 index 00000000..1f27d0c9 --- /dev/null +++ b/rpki/irdb/router.py @@ -0,0 +1,95 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Django-style "Database router". + +For most programs, you don't need this. Django's normal mode of +behavior is to use a single SQL database for the IRDB, which is +normally what we want. For certain test scenarios, however, it's +useful to be able to use the same Django ORM models and managers with +multiple databases without having to complicate the interface by +passing database names everywhere. Using a database router +accomplishes this. +""" + +class DBContextRouter(object): + """ + A Django database router for use with multiple IRDBs. + + This router is designed to work in conjunction with the + rpki.irdb.database context handler (q.v.). + """ + + _app = "irdb" + + _database = None + + def db_for_read(self, model, **hints): + if model._meta.app_label == self._app: + return self._database + else: + return None + + def db_for_write(self, model, **hints): + if model._meta.app_label == self._app: + return self._database + else: + return None + + def allow_relation(self, obj1, obj2, **hints): + if self._database is None: + return None + elif obj1._meta.app_label == self._app and obj2._meta.app_label == self._app: + return True + else: + return None + + def allow_syncdb(self, db, model): + if db == self._database and model._meta.app_label == self._app: + return True + else: + return None + +class database(object): + """ + Context manager for use with DBContextRouter. Use thusly: + + with rpki.irdb.database("blarg"): + do_stuff() + + This binds IRDB operations to database blarg for the duration of + the call to do_stuff(), then restores the prior state. 
+ """ + + def __init__(self, name, on_entry = None, on_exit = None): + if not isinstance(name, str): + raise ValueError("database name must be a string, not %r" % name) + self.name = name + self.on_entry = on_entry + self.on_exit = on_exit + + def __enter__(self): + if self.on_entry is not None: + self.on_entry() + self.former = DBContextRouter._database + DBContextRouter._database = self.name + + def __exit__(self, _type, value, traceback): + assert DBContextRouter._database is self.name + DBContextRouter._database = self.former + if self.on_exit is not None: + self.on_exit() diff --git a/rpki/irdb/zookeeper.py b/rpki/irdb/zookeeper.py new file mode 100644 index 00000000..f99dc9f0 --- /dev/null +++ b/rpki/irdb/zookeeper.py @@ -0,0 +1,1682 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Management code for the IRDB. +""" + +# pylint: disable=W0612 + +import os +import copy +import types +import rpki.config +import rpki.cli +import rpki.sundial +import rpki.log +import rpki.oids +import rpki.http +import rpki.resource_set +import rpki.relaxng +import rpki.exceptions +import rpki.left_right +import rpki.x509 +import rpki.async +import rpki.irdb +import django.db.transaction + +from lxml.etree import (Element, SubElement, ElementTree, + tostring as ElementToString) + +from rpki.csv_utils import csv_reader + + + +# XML namespace and protocol version for OOB setup protocol. The name +# is historical and may change before we propose this as the basis for +# a standard. + +myrpki_namespace = "http://www.hactrn.net/uris/rpki/myrpki/" +myrpki_version = "2" +myrpki_namespaceQName = "{" + myrpki_namespace + "}" + +# XML namespace and protocol version for router certificate requests. +# We probably ought to be pulling this sort of thing from the schema, +# with an assertion to make sure that we understand the current +# protocol version number, but just copy what we did for myrpki until +# I'm ready to rewrite the rpki.relaxng code. + +routercert_namespace = "http://www.hactrn.net/uris/rpki/router-certificate/" +routercert_version = "1" +routercert_namespaceQName = "{" + routercert_namespace + "}" + +myrpki_section = "myrpki" +irdbd_section = "irdbd" +rpkid_section = "rpkid" +pubd_section = "pubd" +rootd_section = "rootd" + +# A whole lot of exceptions + +class HandleNotSet(Exception): "Handle not set." +class MissingHandle(Exception): "Missing handle." +class CouldntTalkToDaemon(Exception): "Couldn't talk to daemon." +class BadXMLMessage(Exception): "Bad XML message." +class PastExpiration(Exception): "Expiration date has already passed." +class CantRunRootd(Exception): "Can't run rootd." 
+ + + +def B64Element(e, tag, obj, **kwargs): + """ + Create an XML element containing Base64 encoded data taken from a + DER object. + """ + + if e is None: + se = Element(tag, **kwargs) + else: + se = SubElement(e, tag, **kwargs) + if e is not None and e.text is None: + e.text = "\n" + se.text = "\n" + obj.get_Base64() + se.tail = "\n" + return se + +class PEM_writer(object): + """ + Write PEM files to disk, keeping track of which ones we've already + written and setting the file mode appropriately. + + Comparing the old file with what we're about to write serves no real + purpose except to calm users who find repeated messages about + writing the same file confusing. + """ + + def __init__(self, logstream = None): + self.wrote = set() + self.logstream = logstream + + def __call__(self, filename, obj, compare = True): + filename = os.path.realpath(filename) + if filename in self.wrote: + return + tempname = filename + pem = obj.get_PEM() + if not filename.startswith("/dev/"): + try: + if compare and pem == open(filename, "r").read(): + return + except: # pylint: disable=W0702 + pass + tempname += ".%s.tmp" % os.getpid() + mode = 0400 if filename.endswith(".key") else 0444 + if self.logstream is not None: + self.logstream.write("Writing %s\n" % filename) + f = os.fdopen(os.open(tempname, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, mode), "w") + f.write(pem) + f.close() + if tempname != filename: + os.rename(tempname, filename) + self.wrote.add(filename) + + + + +def etree_read(filename): + """ + Read an etree from a file, verifying then stripping XML namespace + cruft. + """ + + e = ElementTree(file = filename).getroot() + rpki.relaxng.myrpki.assertValid(e) + for i in e.getiterator(): + if i.tag.startswith(myrpki_namespaceQName): + i.tag = i.tag[len(myrpki_namespaceQName):] + else: + raise BadXMLMessage, "XML tag %r is not in namespace %r" % (i.tag, myrpki_namespace) + return e + + +class etree_wrapper(object): + """ + Wrapper for ETree objects so we can return them as function results + without requiring the caller to understand much about them. 
+ + """ + + def __init__(self, e, msg = None, debug = False): + self.msg = msg + e = copy.deepcopy(e) + e.set("version", myrpki_version) + for i in e.getiterator(): + if i.tag[0] != "{": + i.tag = myrpki_namespaceQName + i.tag + assert i.tag.startswith(myrpki_namespaceQName) + if debug: + print ElementToString(e) + rpki.relaxng.myrpki.assertValid(e) + self.etree = e + + def __str__(self): + return ElementToString(self.etree) + + def save(self, filename, logstream = None): + filename = os.path.realpath(filename) + tempname = filename + if not filename.startswith("/dev/"): + tempname += ".%s.tmp" % os.getpid() + ElementTree(self.etree).write(tempname) + if tempname != filename: + os.rename(tempname, filename) + if logstream is not None: + logstream.write("Wrote %s\n" % filename) + if self.msg is not None: + logstream.write(self.msg + "\n") + + @property + def file(self): + from cStringIO import StringIO + return StringIO(ElementToString(self.etree)) + + + +class Zookeeper(object): + + ## @var show_xml + # Whether to show XML for debugging + + show_xml = False + + def __init__(self, cfg = None, handle = None, logstream = None): + + if cfg is None: + cfg = rpki.config.parser() + + if handle is None: + handle = cfg.get("handle", section = myrpki_section) + + self.cfg = cfg + + self.logstream = logstream + + self.run_rpkid = cfg.getboolean("run_rpkid", section = myrpki_section) + self.run_pubd = cfg.getboolean("run_pubd", section = myrpki_section) + self.run_rootd = cfg.getboolean("run_rootd", section = myrpki_section) + + if self.run_rootd and (not self.run_pubd or not self.run_rpkid): + raise CantRunRootd, "Can't run rootd unless also running rpkid and pubd" + + self.default_repository = cfg.get("default_repository", "", section = myrpki_section) + self.pubd_contact_info = cfg.get("pubd_contact_info", "", section = myrpki_section) + + self.rsync_module = cfg.get("publication_rsync_module", section = myrpki_section) + self.rsync_server = cfg.get("publication_rsync_server", section = myrpki_section) + + self.reset_identity(handle) + + + def reset_identity(self, handle): + """ + Select handle of current resource holding entity. + """ + + if handle is None: + raise MissingHandle + self.handle = handle + + + def set_logstream(self, logstream): + """ + Set log stream for this Zookeeper. The log stream is a file-like + object, or None to suppress all logging. + """ + + self.logstream = logstream + + + def log(self, msg): + """ + Send some text to this Zookeeper's log stream, if one is set. + """ + + if self.logstream is not None: + self.logstream.write(msg) + self.logstream.write("\n") + + + @property + def resource_ca(self): + """ + Get ResourceHolderCA object associated with current handle. + """ + + if self.handle is None: + raise HandleNotSet + return rpki.irdb.ResourceHolderCA.objects.get(handle = self.handle) + + + @property + def server_ca(self): + """ + Get ServerCA object. + """ + + return rpki.irdb.ServerCA.objects.get() + + + @django.db.transaction.commit_on_success + def initialize_server_bpki(self): + """ + Initialize server BPKI portion of an RPKI installation. Reads the + configuration file and generates the initial BPKI server + certificates needed to start daemons. 
+ """ + + if self.run_rpkid or self.run_pubd: + server_ca, created = rpki.irdb.ServerCA.objects.get_or_certify() + rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "irbe") + + if self.run_rpkid: + rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "rpkid") + rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "irdbd") + + if self.run_pubd: + rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "pubd") + + + @django.db.transaction.commit_on_success + def initialize_resource_bpki(self): + """ + Initialize the resource-holding BPKI for an RPKI installation. + Returns XML describing the resource holder. + + This method is present primarily for backwards compatibility with + the old combined initialize() method which initialized both the + server BPKI and the default resource-holding BPKI in a single + method call. In the long run we want to replace this with + something that takes a handle as argument and creates the + resource-holding BPKI idenity if needed. + """ + + resource_ca, created = rpki.irdb.ResourceHolderCA.objects.get_or_certify(handle = self.handle) + return self.generate_identity() + + + def initialize(self): + """ + Backwards compatibility wrapper: calls initialize_server_bpki() + and initialize_resource_bpki(), returns latter's result. + """ + + self.initialize_server_bpki() + return self.initialize_resource_bpki() + + + def generate_identity(self): + """ + Generate identity XML. Broken out of .initialize() because it's + easier for the GUI this way. + """ + + e = Element("identity", handle = self.handle) + B64Element(e, "bpki_ta", self.resource_ca.certificate) + return etree_wrapper(e, msg = 'This is the "identity" file you will need to send to your parent') + + + @django.db.transaction.commit_on_success + def delete_self(self): + """ + Delete the ResourceHolderCA object corresponding to the current handle. + This corresponds to deleting an rpkid <self/> object. + + This code assumes the normal Django cascade-on-delete behavior, + that is, we assume that deleting the ResourceHolderCA object + deletes all the subordinate objects that refer to it via foreign + key relationships. + """ + + resource_ca = self.resource_ca + if resource_ca is not None: + resource_ca.delete() + else: + self.log("No such ResourceHolderCA \"%s\"" % self.handle) + + + @django.db.transaction.commit_on_success + def configure_rootd(self): + + assert self.run_rpkid and self.run_pubd and self.run_rootd + + rpki.irdb.Rootd.objects.get_or_certify( + issuer = self.resource_ca, + service_uri = "http://localhost:%s/" % self.cfg.get("rootd_server_port", section = myrpki_section)) + + return self.generate_rootd_repository_offer() + + + def generate_rootd_repository_offer(self): + """ + Generate repository offer for rootd. Split out of + configure_rootd() because that's easier for the GUI. + """ + + # The following assumes we'll set up the respository manually. + # Not sure this is a reasonable assumption, particularly if we + # ever fix rootd to use the publication protocol. 
+ + try: + self.resource_ca.repositories.get(handle = self.handle) + return None + + except rpki.irdb.Repository.DoesNotExist: + e = Element("repository", type = "offer", handle = self.handle, parent_handle = self.handle) + B64Element(e, "bpki_client_ta", self.resource_ca.certificate) + return etree_wrapper(e, msg = 'This is the "repository offer" file for you to use if you want to publish in your own repository') + + + def write_bpki_files(self): + """ + Write out BPKI certificate, key, and CRL files for daemons that + need them. + """ + + writer = PEM_writer(self.logstream) + + if self.run_rpkid: + rpkid = self.server_ca.ee_certificates.get(purpose = "rpkid") + writer(self.cfg.get("bpki-ta", section = rpkid_section), self.server_ca.certificate) + writer(self.cfg.get("rpkid-key", section = rpkid_section), rpkid.private_key) + writer(self.cfg.get("rpkid-cert", section = rpkid_section), rpkid.certificate) + writer(self.cfg.get("irdb-cert", section = rpkid_section), + self.server_ca.ee_certificates.get(purpose = "irdbd").certificate) + writer(self.cfg.get("irbe-cert", section = rpkid_section), + self.server_ca.ee_certificates.get(purpose = "irbe").certificate) + + if self.run_pubd: + pubd = self.server_ca.ee_certificates.get(purpose = "pubd") + writer(self.cfg.get("bpki-ta", section = pubd_section), self.server_ca.certificate) + writer(self.cfg.get("pubd-key", section = pubd_section), pubd.private_key) + writer(self.cfg.get("pubd-cert", section = pubd_section), pubd.certificate) + writer(self.cfg.get("irbe-cert", section = pubd_section), + self.server_ca.ee_certificates.get(purpose = "irbe").certificate) + + if self.run_rootd: + try: + rootd = rpki.irdb.ResourceHolderCA.objects.get(handle = self.handle).rootd + writer(self.cfg.get("bpki-ta", section = rootd_section), self.server_ca.certificate) + writer(self.cfg.get("rootd-bpki-crl", section = rootd_section), self.server_ca.latest_crl) + writer(self.cfg.get("rootd-bpki-key", section = rootd_section), rootd.private_key) + writer(self.cfg.get("rootd-bpki-cert", section = rootd_section), rootd.certificate) + writer(self.cfg.get("child-bpki-cert", section = rootd_section), rootd.issuer.certificate) + except rpki.irdb.ResourceHolderCA.DoesNotExist: + self.log("rootd enabled but resource holding entity not yet configured, skipping rootd setup") + except rpki.irdb.Rootd.DoesNotExist: + self.log("rootd enabled but not yet configured, skipping rootd setup") + + + @django.db.transaction.commit_on_success + def update_bpki(self): + """ + Update BPKI certificates. Assumes an existing RPKI installation. + + Basic plan here is to reissue all BPKI certificates we can, right + now. In the long run we might want to be more clever about only + touching ones that need maintenance, but this will do for a start. + + We also reissue CRLs for all CAs. + + Most likely this should be run under cron. 
+ """ + + for model in (rpki.irdb.ServerCA, + rpki.irdb.ResourceHolderCA, + rpki.irdb.ServerEE, + rpki.irdb.Referral, + rpki.irdb.Rootd, + rpki.irdb.HostedCA, + rpki.irdb.BSC, + rpki.irdb.Child, + rpki.irdb.Parent, + rpki.irdb.Client, + rpki.irdb.Repository): + for obj in model.objects.all(): + self.log("Regenerating BPKI certificate %s" % obj.certificate.getSubject()) + obj.avow() + obj.save() + + self.log("Regenerating Server BPKI CRL") + self.server_ca.generate_crl() + self.server_ca.save() + + for ca in rpki.irdb.ResourceHolderCA.objects.all(): + self.log("Regenerating BPKI CRL for Resource Holder %s" % ca.handle) + ca.generate_crl() + ca.save() + + + @django.db.transaction.commit_on_success + def synchronize_bpki(self): + """ + Synchronize BPKI updates. This is separate from .update_bpki() + because this requires rpkid to be running and none of the other + BPKI update stuff does; there may be circumstances under which it + makes sense to do the rest of the BPKI update and allow this to + fail with a warning. + """ + + if self.run_rpkid: + updates = [] + + updates.extend( + rpki.left_right.self_elt.make_pdu( + action = "set", + tag = "%s__self" % ca.handle, + self_handle = ca.handle, + bpki_cert = ca.certificate) + for ca in rpki.irdb.ResourceHolderCA.objects.all()) + + updates.extend( + rpki.left_right.bsc_elt.make_pdu( + action = "set", + tag = "%s__bsc__%s" % (bsc.issuer.handle, bsc.handle), + self_handle = bsc.issuer.handle, + bsc_handle = bsc.handle, + signing_cert = bsc.certificate, + signing_cert_crl = bsc.issuer.latest_crl) + for bsc in rpki.irdb.BSC.objects.all()) + + updates.extend( + rpki.left_right.repository_elt.make_pdu( + action = "set", + tag = "%s__repository__%s" % (repository.issuer.handle, repository.handle), + self_handle = repository.issuer.handle, + repository_handle = repository.handle, + bpki_cert = repository.certificate) + for repository in rpki.irdb.Repository.objects.all()) + + updates.extend( + rpki.left_right.parent_elt.make_pdu( + action = "set", + tag = "%s__parent__%s" % (parent.issuer.handle, parent.handle), + self_handle = parent.issuer.handle, + parent_handle = parent.handle, + bpki_cms_cert = parent.certificate) + for parent in rpki.irdb.Parent.objects.all()) + + updates.extend( + rpki.left_right.parent_elt.make_pdu( + action = "set", + tag = "%s__rootd" % rootd.issuer.handle, + self_handle = rootd.issuer.handle, + parent_handle = rootd.issuer.handle, + bpki_cms_cert = rootd.certificate) + for rootd in rpki.irdb.Rootd.objects.all()) + + updates.extend( + rpki.left_right.child_elt.make_pdu( + action = "set", + tag = "%s__child__%s" % (child.issuer.handle, child.handle), + self_handle = child.issuer.handle, + child_handle = child.handle, + bpki_cert = child.certificate) + for child in rpki.irdb.Child.objects.all()) + + if updates: + self.check_error_report(self.call_rpkid(updates)) + + if self.run_pubd: + updates = [] + + updates.append( + rpki.publication.config_elt.make_pdu( + action = "set", + bpki_crl = self.server_ca.latest_crl)) + + updates.extend( + rpki.publication.client_elt.make_pdu( + action = "set", + client_handle = client.handle, + bpki_cert = client.certificate) + for client in self.server_ca.clients.all()) + + if updates: + self.check_error_report(self.call_pubd(updates)) + + + @django.db.transaction.commit_on_success + def configure_child(self, filename, child_handle = None, valid_until = None): + """ + Configure a new child of this RPKI entity, given the child's XML + identity file as an input. 
Extracts the child's data from the + XML, cross-certifies the child's resource-holding BPKI + certificate, and generates an XML file describing the relationship + between the child and this parent, including this parent's BPKI + data and up-down protocol service URI. + """ + + c = etree_read(filename) + + if child_handle is None: + child_handle = c.get("handle") + + if valid_until is None: + valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 365) + else: + valid_until = rpki.sundial.datetime.fromXMLtime(valid_until) + if valid_until < rpki.sundial.now(): + raise PastExpiration, "Specified new expiration time %s has passed" % valid_until + + self.log("Child calls itself %r, we call it %r" % (c.get("handle"), child_handle)) + + child, created = rpki.irdb.Child.objects.get_or_certify( + issuer = self.resource_ca, + handle = child_handle, + ta = rpki.x509.X509(Base64 = c.findtext("bpki_ta")), + valid_until = valid_until) + + return self.generate_parental_response(child), child_handle + + + @django.db.transaction.commit_on_success + def generate_parental_response(self, child): + """ + Generate parental response XML. Broken out of .configure_child() + for GUI. + """ + + service_uri = "http://%s:%s/up-down/%s/%s" % ( + self.cfg.get("rpkid_server_host", section = myrpki_section), + self.cfg.get("rpkid_server_port", section = myrpki_section), + self.handle, child.handle) + + e = Element("parent", parent_handle = self.handle, child_handle = child.handle, + service_uri = service_uri, valid_until = str(child.valid_until)) + B64Element(e, "bpki_resource_ta", self.resource_ca.certificate) + B64Element(e, "bpki_child_ta", child.ta) + + try: + if self.default_repository: + repo = self.resource_ca.repositories.get(handle = self.default_repository) + else: + repo = self.resource_ca.repositories.get() + except rpki.irdb.Repository.DoesNotExist: + repo = None + + if repo is None: + self.log("Couldn't find any usable repositories, not giving referral") + + elif repo.handle == self.handle: + SubElement(e, "repository", type = "offer") + + else: + proposed_sia_base = repo.sia_base + child.handle + "/" + referral_cert, created = rpki.irdb.Referral.objects.get_or_certify(issuer = self.resource_ca) + auth = rpki.x509.SignedReferral() + auth.set_content(B64Element(None, myrpki_namespaceQName + "referral", child.ta, + version = myrpki_version, + authorized_sia_base = proposed_sia_base)) + auth.schema_check() + auth.sign(referral_cert.private_key, referral_cert.certificate, self.resource_ca.latest_crl) + + r = SubElement(e, "repository", type = "referral") + B64Element(r, "authorization", auth, referrer = repo.client_handle) + SubElement(r, "contact_info") + + return etree_wrapper(e, msg = "Send this file back to the child you just configured") + + + @django.db.transaction.commit_on_success + def delete_child(self, child_handle): + """ + Delete a child of this RPKI entity. + """ + + self.resource_ca.children.get(handle = child_handle).delete() + + + @django.db.transaction.commit_on_success + def configure_parent(self, filename, parent_handle = None): + """ + Configure a new parent of this RPKI entity, given the output of + the parent's configure_child command as input. Reads the parent's + response XML, extracts the parent's BPKI and service URI + information, cross-certifies the parent's BPKI data into this + entity's BPKI, and checks for offers or referrals of publication + service. 
If a publication offer or referral is present, we + generate a request-for-service message to that repository, in case + the user wants to avail herself of the referral or offer. + """ + + p = etree_read(filename) + + if parent_handle is None: + parent_handle = p.get("parent_handle") + + r = p.find("repository") + + repository_type = "none" + referrer = None + referral_authorization = None + + if r is not None: + repository_type = r.get("type") + + if repository_type == "referral": + a = r.find("authorization") + referrer = a.get("referrer") + referral_authorization = rpki.x509.SignedReferral(Base64 = a.text) + + self.log("Parent calls itself %r, we call it %r" % (p.get("parent_handle"), parent_handle)) + self.log("Parent calls us %r" % p.get("child_handle")) + + parent, created = rpki.irdb.Parent.objects.get_or_certify( + issuer = self.resource_ca, + handle = parent_handle, + child_handle = p.get("child_handle"), + parent_handle = p.get("parent_handle"), + service_uri = p.get("service_uri"), + ta = rpki.x509.X509(Base64 = p.findtext("bpki_resource_ta")), + repository_type = repository_type, + referrer = referrer, + referral_authorization = referral_authorization) + + return self.generate_repository_request(parent), parent_handle + + + def generate_repository_request(self, parent): + """ + Generate repository request for a given parent. + """ + + e = Element("repository", handle = self.handle, + parent_handle = parent.handle, type = parent.repository_type) + if parent.repository_type == "referral": + B64Element(e, "authorization", parent.referral_authorization, referrer = parent.referrer) + SubElement(e, "contact_info") + B64Element(e, "bpki_client_ta", self.resource_ca.certificate) + return etree_wrapper(e, msg = "This is the file to send to the repository operator") + + + @django.db.transaction.commit_on_success + def delete_parent(self, parent_handle): + """ + Delete a parent of this RPKI entity. + """ + + self.resource_ca.parents.get(handle = parent_handle).delete() + + + @django.db.transaction.commit_on_success + def delete_rootd(self): + """ + Delete rootd associated with this RPKI entity. + """ + + self.resource_ca.rootd.delete() + + + @django.db.transaction.commit_on_success + def configure_publication_client(self, filename, sia_base = None, flat = False): + """ + Configure publication server to know about a new client, given the + client's request-for-service message as input. Reads the client's + request for service, cross-certifies the client's BPKI data, and + generates a response message containing the repository's BPKI data + and service URI. 
+ """ + + client = etree_read(filename) + + client_ta = rpki.x509.X509(Base64 = client.findtext("bpki_client_ta")) + + if sia_base is None and flat: + self.log("Flat publication structure forced, homing client at top-level") + sia_base = "rsync://%s/%s/%s/" % (self.rsync_server, self.rsync_module, client.get("handle")) + + if sia_base is None and client.get("type") == "referral": + self.log("This looks like a referral, checking") + try: + auth = client.find("authorization") + referrer = self.server_ca.clients.get(handle = auth.get("referrer")) + referral_cms = rpki.x509.SignedReferral(Base64 = auth.text) + referral_xml = referral_cms.unwrap(ta = (referrer.certificate, self.server_ca.certificate)) + if rpki.x509.X509(Base64 = referral_xml.text) != client_ta: + raise BadXMLMessage, "Referral trust anchor does not match" + sia_base = referral_xml.get("authorized_sia_base") + except rpki.irdb.Client.DoesNotExist: + self.log("We have no record of the client (%s) alleged to have made this referral" % auth.get("referrer")) + + if sia_base is None and client.get("type") == "offer": + self.log("This looks like an offer, checking") + try: + parent = rpki.irdb.ResourceHolderCA.objects.get(children__ta__exact = client_ta) + if "/" in parent.repositories.get(ta = self.server_ca.certificate).client_handle: + self.log("Client's parent is not top-level, this is not a valid offer") + else: + self.log("Found client and its parent, nesting") + sia_base = "rsync://%s/%s/%s/%s/" % (self.rsync_server, self.rsync_module, + parent.handle, client.get("handle")) + except rpki.irdb.Repository.DoesNotExist: + self.log("Found client's parent, but repository isn't set, this shouldn't happen!") + except rpki.irdb.ResourceHolderCA.DoesNotExist: + try: + rpki.irdb.Rootd.objects.get(issuer__certificate__exact = client_ta) + except rpki.irdb.Rootd.DoesNotExist: + self.log("We don't host this client's parent, so we didn't make this offer") + else: + self.log("This client's parent is rootd") + + if sia_base is None: + self.log("Don't know where to nest this client, defaulting to top-level") + sia_base = "rsync://%s/%s/%s/" % (self.rsync_server, self.rsync_module, client.get("handle")) + + if not sia_base.startswith("rsync://"): + raise BadXMLMessage, "Malformed sia_base parameter %r, should start with 'rsync://'" % sia_base + + client_handle = "/".join(sia_base.rstrip("/").split("/")[4:]) + + parent_handle = client.get("parent_handle") + + self.log("Client calls itself %r, we call it %r" % (client.get("handle"), client_handle)) + self.log("Client says its parent handle is %r" % parent_handle) + + client, created = rpki.irdb.Client.objects.get_or_certify( + issuer = self.server_ca, + handle = client_handle, + parent_handle = parent_handle, + ta = client_ta, + sia_base = sia_base) + + return self.generate_repository_response(client), client_handle + + + def generate_repository_response(self, client): + """ + Generate repository response XML to a given client. 
+ """ + + service_uri = "http://%s:%s/client/%s" % ( + self.cfg.get("pubd_server_host", section = myrpki_section), + self.cfg.get("pubd_server_port", section = myrpki_section), + client.handle) + + e = Element("repository", type = "confirmed", + client_handle = client.handle, + parent_handle = client.parent_handle, + sia_base = client.sia_base, + service_uri = service_uri) + + B64Element(e, "bpki_server_ta", self.server_ca.certificate) + B64Element(e, "bpki_client_ta", client.ta) + SubElement(e, "contact_info").text = self.pubd_contact_info + return etree_wrapper(e, msg = "Send this file back to the publication client you just configured") + + + @django.db.transaction.commit_on_success + def delete_publication_client(self, client_handle): + """ + Delete a publication client of this RPKI entity. + """ + + self.server_ca.clients.get(handle = client_handle).delete() + + + @django.db.transaction.commit_on_success + def configure_repository(self, filename, parent_handle = None): + """ + Configure a publication repository for this RPKI entity, given the + repository's response to our request-for-service message as input. + Reads the repository's response, extracts and cross-certifies the + BPKI data and service URI, and links the repository data with the + corresponding parent data in our local database. + """ + + r = etree_read(filename) + + if parent_handle is None: + parent_handle = r.get("parent_handle") + + self.log("Repository calls us %r" % (r.get("client_handle"))) + self.log("Repository response associated with parent_handle %r" % parent_handle) + + try: + if parent_handle == self.handle: + turtle = self.resource_ca.rootd + else: + turtle = self.resource_ca.parents.get(handle = parent_handle) + + except (rpki.irdb.Parent.DoesNotExist, rpki.irdb.Rootd.DoesNotExist): + self.log("Could not find parent %r in our database" % parent_handle) + + else: + rpki.irdb.Repository.objects.get_or_certify( + issuer = self.resource_ca, + handle = parent_handle, + client_handle = r.get("client_handle"), + service_uri = r.get("service_uri"), + sia_base = r.get("sia_base"), + ta = rpki.x509.X509(Base64 = r.findtext("bpki_server_ta")), + turtle = turtle) + + + @django.db.transaction.commit_on_success + def delete_repository(self, repository_handle): + """ + Delete a repository of this RPKI entity. + """ + + self.resource_ca.repositories.get(handle = repository_handle).delete() + + + @django.db.transaction.commit_on_success + def renew_children(self, child_handle, valid_until = None): + """ + Update validity period for one child entity or, if child_handle is + None, for all child entities. + """ + + if child_handle is None: + children = self.resource_ca.children.all() + else: + children = self.resource_ca.children.filter(handle = child_handle) + + if valid_until is None: + valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 365) + else: + valid_until = rpki.sundial.datetime.fromXMLtime(valid_until) + if valid_until < rpki.sundial.now(): + raise PastExpiration, "Specified new expiration time %s has passed" % valid_until + + self.log("New validity date %s" % valid_until) + + for child in children: + child.valid_until = valid_until + child.save() + + + @django.db.transaction.commit_on_success + def load_prefixes(self, filename, ignore_missing_children = False): + """ + Whack IRDB to match prefixes.csv. 
+ """ + + grouped4 = {} + grouped6 = {} + + for handle, prefix in csv_reader(filename, columns = 2): + grouped = grouped6 if ":" in prefix else grouped4 + if handle not in grouped: + grouped[handle] = [] + grouped[handle].append(prefix) + + primary_keys = [] + + for version, grouped, rset in ((4, grouped4, rpki.resource_set.resource_set_ipv4), + (6, grouped6, rpki.resource_set.resource_set_ipv6)): + for handle, prefixes in grouped.iteritems(): + try: + child = self.resource_ca.children.get(handle = handle) + except rpki.irdb.Child.DoesNotExist: + if not ignore_missing_children: + raise + else: + for prefix in rset(",".join(prefixes)): + obj, created = rpki.irdb.ChildNet.objects.get_or_create( + child = child, + start_ip = str(prefix.min), + end_ip = str(prefix.max), + version = version) + primary_keys.append(obj.pk) + + q = rpki.irdb.ChildNet.objects + q = q.filter(child__issuer__exact = self.resource_ca) + q = q.exclude(pk__in = primary_keys) + q.delete() + + + @django.db.transaction.commit_on_success + def load_asns(self, filename, ignore_missing_children = False): + """ + Whack IRDB to match asns.csv. + """ + + grouped = {} + + for handle, asn in csv_reader(filename, columns = 2): + if handle not in grouped: + grouped[handle] = [] + grouped[handle].append(asn) + + primary_keys = [] + + for handle, asns in grouped.iteritems(): + try: + child = self.resource_ca.children.get(handle = handle) + except rpki.irdb.Child.DoesNotExist: + if not ignore_missing_children: + raise + else: + for asn in rpki.resource_set.resource_set_as(",".join(asns)): + obj, created = rpki.irdb.ChildASN.objects.get_or_create( + child = child, + start_as = str(asn.min), + end_as = str(asn.max)) + primary_keys.append(obj.pk) + + q = rpki.irdb.ChildASN.objects + q = q.filter(child__issuer__exact = self.resource_ca) + q = q.exclude(pk__in = primary_keys) + q.delete() + + + @django.db.transaction.commit_on_success + def load_roa_requests(self, filename): + """ + Whack IRDB to match roa.csv. + """ + + grouped = {} + + # format: p/n-m asn group + for pnm, asn, group in csv_reader(filename, columns = 3): + key = (asn, group) + if key not in grouped: + grouped[key] = [] + grouped[key].append(pnm) + + # Deleting and recreating all the ROA requests is inefficient, + # but rpkid's current representation of ROA requests is wrong + # (see #32), so it's not worth a lot of effort here as we're + # just going to have to rewrite this soon anyway. + + self.resource_ca.roa_requests.all().delete() + + for key, pnms in grouped.iteritems(): + asn, group = key + + roa_request = self.resource_ca.roa_requests.create(asn = asn) + + for pnm in pnms: + if ":" in pnm: + p = rpki.resource_set.roa_prefix_ipv6.parse_str(pnm) + v = 6 + else: + p = rpki.resource_set.roa_prefix_ipv4.parse_str(pnm) + v = 4 + roa_request.prefixes.create( + version = v, + prefix = str(p.prefix), + prefixlen = int(p.prefixlen), + max_prefixlen = int(p.max_prefixlen)) + + + @django.db.transaction.commit_on_success + def load_ghostbuster_requests(self, filename, parent = None): + """ + Whack IRDB to match ghostbusters.vcard. + + This accepts one or more vCards from a file. 
+ """ + + self.resource_ca.ghostbuster_requests.filter(parent = parent).delete() + + vcard = [] + + for line in open(filename, "r"): + if not vcard and not line.upper().startswith("BEGIN:VCARD"): + continue + vcard.append(line) + if line.upper().startswith("END:VCARD"): + self.resource_ca.ghostbuster_requests.create(vcard = "".join(vcard), parent = parent) + vcard = [] + + + def call_rpkid(self, *pdus): + """ + Issue a call to rpkid, return result. + + Implementation is a little silly, constructs a wrapper object, + invokes it once, then throws it away. Hard to do better without + rewriting a bit of the HTTP code, as we want to be sure we're + using the current BPKI certificate and key objects. + """ + + url = "http://%s:%s/left-right" % ( + self.cfg.get("rpkid_server_host", section = myrpki_section), + self.cfg.get("rpkid_server_port", section = myrpki_section)) + + rpkid = self.server_ca.ee_certificates.get(purpose = "rpkid") + irbe = self.server_ca.ee_certificates.get(purpose = "irbe") + + if len(pdus) == 1 and isinstance(pdus[0], types.GeneratorType): + pdus = tuple(pdus[0]) + elif len(pdus) == 1 and isinstance(pdus[0], (tuple, list)): + pdus = pdus[0] + + call_rpkid = rpki.async.sync_wrapper(rpki.http.caller( + proto = rpki.left_right, + client_key = irbe.private_key, + client_cert = irbe.certificate, + server_ta = self.server_ca.certificate, + server_cert = rpkid.certificate, + url = url, + debug = self.show_xml)) + + return call_rpkid(*pdus) + + + def run_rpkid_now(self): + """ + Poke rpkid to immediately run the cron job for the current handle. + + This method is used by the GUI when a user has changed something in the + IRDB (ghostbuster, roa) which does not require a full synchronize() call, + to force the object to be immediately issued. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, run_now = "yes")) + + + def publish_world_now(self): + """ + Poke rpkid to (re)publish everything for the current handle. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, publish_world_now = "yes")) + + + def reissue(self): + """ + Poke rpkid to reissue everything for the current handle. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, reissue = "yes")) + + def rekey(self): + """ + Poke rpkid to rekey all RPKI certificates received for the current + handle. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, rekey = "yes")) + + + def revoke(self): + """ + Poke rpkid to revoke old RPKI keys for the current handle. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, revoke = "yes")) + + + def revoke_forgotten(self): + """ + Poke rpkid to revoke old forgotten RPKI keys for the current handle. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, revoke_forgotten = "yes")) + + + def clear_all_sql_cms_replay_protection(self): + """ + Tell rpkid and pubd to clear replay protection for all SQL-based + entities. 
This is a fairly blunt instrument, but as we don't + expect this to be necessary except in the case of gross + misconfiguration, it should suffice + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu(action = "set", self_handle = ca.handle, + clear_replay_protection = "yes") + for ca in rpki.irdb.ResourceHolderCA.objects.all()) + if self.run_pubd: + self.call_pubd(rpki.publication.client_elt.make_pdu(action = "set", + client_handle = client.handle, + clear_replay_protection = "yes") + for client in self.server_ca.clients.all()) + + + def call_pubd(self, *pdus): + """ + Issue a call to pubd, return result. + + Implementation is a little silly, constructs a wrapper object, + invokes it once, then throws it away. Hard to do better without + rewriting a bit of the HTTP code, as we want to be sure we're + using the current BPKI certificate and key objects. + """ + + url = "http://%s:%s/control" % ( + self.cfg.get("pubd_server_host", section = myrpki_section), + self.cfg.get("pubd_server_port", section = myrpki_section)) + + pubd = self.server_ca.ee_certificates.get(purpose = "pubd") + irbe = self.server_ca.ee_certificates.get(purpose = "irbe") + + if len(pdus) == 1 and isinstance(pdus[0], types.GeneratorType): + pdus = tuple(pdus[0]) + elif len(pdus) == 1 and isinstance(pdus[0], (tuple, list)): + pdus = pdus[0] + + call_pubd = rpki.async.sync_wrapper(rpki.http.caller( + proto = rpki.publication, + client_key = irbe.private_key, + client_cert = irbe.certificate, + server_ta = self.server_ca.certificate, + server_cert = pubd.certificate, + url = url, + debug = self.show_xml)) + + return call_pubd(*pdus) + + + def check_error_report(self, pdus): + """ + Check a response from rpkid or pubd for error_report PDUs, log and + throw exceptions as needed. + """ + + if any(isinstance(pdu, (rpki.left_right.report_error_elt, rpki.publication.report_error_elt)) for pdu in pdus): + for pdu in pdus: + if isinstance(pdu, rpki.left_right.report_error_elt): + self.log("rpkid reported failure: %s" % pdu.error_code) + elif isinstance(pdu, rpki.publication.report_error_elt): + self.log("pubd reported failure: %s" % pdu.error_code) + else: + continue + if pdu.error_text: + self.log(pdu.error_text) + raise CouldntTalkToDaemon + + + @django.db.transaction.commit_on_success + def synchronize(self, *handles_to_poke): + """ + Configure RPKI daemons with the data built up by the other + commands in this program. Commands which modify the IRDB and want + to whack everything into sync should call this when they're done, + but be warned that this can be slow with a lot of CAs. + + Any arguments given are handles of CAs which should be poked with a + <self run_now="yes"/> operation. + """ + + for ca in rpki.irdb.ResourceHolderCA.objects.all(): + self.synchronize_rpkid_one_ca_core(ca, ca.handle in handles_to_poke) + self.synchronize_pubd_core() + self.synchronize_rpkid_deleted_core() + + + @django.db.transaction.commit_on_success + def synchronize_ca(self, ca = None, poke = False): + """ + Synchronize one CA. Most commands which modify a CA should call + this. CA to synchronize defaults to the current resource CA. + """ + + if ca is None: + ca = self.resource_ca + self.synchronize_rpkid_one_ca_core(ca, poke) + + + @django.db.transaction.commit_on_success + def synchronize_deleted_ca(self): + """ + Delete CAs which are present in rpkid's database but not in the + IRDB. + """ + + self.synchronize_rpkid_deleted_core() + + + @django.db.transaction.commit_on_success + def synchronize_pubd(self): + """ + Synchronize pubd. 
Most commands which modify pubd should call this. + """ + + self.synchronize_pubd_core() + + + def synchronize_rpkid_one_ca_core(self, ca, poke = False): + """ + Synchronize one CA. This is the core synchronization code. Don't + call this directly, instead call one of the methods that calls + this inside a Django commit wrapper. + + This method configures rpkid with data built up by the other + commands in this program. Most commands which modify IRDB values + related to rpkid should call this when they're done. + + If poke is True, we append a left-right run_now operation for this + CA to the end of whatever other commands this method generates. + """ + + # We can use a single BSC for everything -- except BSC key + # rollovers. Drive off that bridge when we get to it. + + bsc_handle = "bsc" + + # A default RPKI CRL cycle time of six hours seems sane. One + # might make a case for a day instead, but we've been running with + # six hours for a while now and haven't seen a lot of whining. + + self_crl_interval = self.cfg.getint("self_crl_interval", 6 * 60 * 60, section = myrpki_section) + + # regen_margin now just controls how long before RPKI certificate + # expiration we should regenerate; it used to control the interval + # before RPKI CRL staleness at which to regenerate the CRL, but + # using the same timer value for both of these is hopeless. + # + # A default regeneration margin of two weeks gives enough time for + # humans to react. We add a two hour fudge factor in the hope + # that this will regenerate certificates just *before* the + # companion cron job warns of impending doom. + + self_regen_margin = self.cfg.getint("self_regen_margin", 14 * 24 * 60 * 60 + 2 * 60, section = myrpki_section) + + # See what rpkid already has on file for this entity. + + rpkid_reply = self.call_rpkid( + rpki.left_right.self_elt.make_pdu( action = "get", tag = "self", self_handle = ca.handle), + rpki.left_right.bsc_elt.make_pdu( action = "list", tag = "bsc", self_handle = ca.handle), + rpki.left_right.repository_elt.make_pdu(action = "list", tag = "repository", self_handle = ca.handle), + rpki.left_right.parent_elt.make_pdu( action = "list", tag = "parent", self_handle = ca.handle), + rpki.left_right.child_elt.make_pdu( action = "list", tag = "child", self_handle = ca.handle)) + + self_pdu = rpkid_reply[0] + bsc_pdus = dict((x.bsc_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.bsc_elt)) + repository_pdus = dict((x.repository_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.repository_elt)) + parent_pdus = dict((x.parent_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.parent_elt)) + child_pdus = dict((x.child_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.child_elt)) + + rpkid_query = [] + + self_cert, created = rpki.irdb.HostedCA.objects.get_or_certify( + issuer = self.server_ca, + hosted = ca) + + # There should be exactly one <self/> object per hosted entity, by definition + + if (isinstance(self_pdu, rpki.left_right.report_error_elt) or + self_pdu.crl_interval != self_crl_interval or + self_pdu.regen_margin != self_regen_margin or + self_pdu.bpki_cert != self_cert.certificate): + rpkid_query.append(rpki.left_right.self_elt.make_pdu( + action = "create" if isinstance(self_pdu, rpki.left_right.report_error_elt) else "set", + tag = "self", + self_handle = ca.handle, + bpki_cert = ca.certificate, + crl_interval = self_crl_interval, + regen_margin = self_regen_margin)) + + # In general we only need one <bsc/> per <self/>. 
BSC objects + # are a little unusual in that the keypair and PKCS #10 + # subelement is generated by rpkid, so complete setup requires + # two round trips. + + bsc_pdu = bsc_pdus.pop(bsc_handle, None) + + if bsc_pdu is None: + rpkid_query.append(rpki.left_right.bsc_elt.make_pdu( + action = "create", + tag = "bsc", + self_handle = ca.handle, + bsc_handle = bsc_handle, + generate_keypair = "yes")) + + elif bsc_pdu.pkcs10_request is None: + rpkid_query.append(rpki.left_right.bsc_elt.make_pdu( + action = "set", + tag = "bsc", + self_handle = ca.handle, + bsc_handle = bsc_handle, + generate_keypair = "yes")) + + rpkid_query.extend(rpki.left_right.bsc_elt.make_pdu( + action = "destroy", self_handle = ca.handle, bsc_handle = b) for b in bsc_pdus) + + # If we've already got actions queued up, run them now, so we + # can finish setting up the BSC before anything tries to use it. + + if rpkid_query: + rpkid_query.append(rpki.left_right.bsc_elt.make_pdu(action = "list", tag = "bsc", self_handle = ca.handle)) + rpkid_reply = self.call_rpkid(rpkid_query) + bsc_pdus = dict((x.bsc_handle, x) + for x in rpkid_reply + if isinstance(x, rpki.left_right.bsc_elt) and x.action == "list") + bsc_pdu = bsc_pdus.pop(bsc_handle, None) + self.check_error_report(rpkid_reply) + + rpkid_query = [] + + assert bsc_pdu.pkcs10_request is not None + + bsc, created = rpki.irdb.BSC.objects.get_or_certify( + issuer = ca, + handle = bsc_handle, + pkcs10 = bsc_pdu.pkcs10_request) + + if bsc_pdu.signing_cert != bsc.certificate or bsc_pdu.signing_cert_crl != ca.latest_crl: + rpkid_query.append(rpki.left_right.bsc_elt.make_pdu( + action = "set", + tag = "bsc", + self_handle = ca.handle, + bsc_handle = bsc_handle, + signing_cert = bsc.certificate, + signing_cert_crl = ca.latest_crl)) + + # At present we need one <repository/> per <parent/>, not because + # rpkid requires that, but because pubd does. pubd probably should + # be fixed to support a single client allowed to update multiple + # trees, but for the moment the easiest way forward is just to + # enforce a 1:1 mapping between <parent/> and <repository/> objects + + for repository in ca.repositories.all(): + + repository_pdu = repository_pdus.pop(repository.handle, None) + + if (repository_pdu is None or + repository_pdu.bsc_handle != bsc_handle or + repository_pdu.peer_contact_uri != repository.service_uri or + repository_pdu.bpki_cert != repository.certificate): + rpkid_query.append(rpki.left_right.repository_elt.make_pdu( + action = "create" if repository_pdu is None else "set", + tag = repository.handle, + self_handle = ca.handle, + repository_handle = repository.handle, + bsc_handle = bsc_handle, + peer_contact_uri = repository.service_uri, + bpki_cert = repository.certificate)) + + rpkid_query.extend(rpki.left_right.repository_elt.make_pdu( + action = "destroy", self_handle = ca.handle, repository_handle = r) for r in repository_pdus) + + # <parent/> setup code currently assumes 1:1 mapping between + # <repository/> and <parent/>, and further assumes that the handles + # for an associated pair are the identical (that is: + # parent.repository_handle == parent.parent_handle). + # + # If no such repository exists, our choices are to ignore the + # parent entry or throw an error. For now, we ignore the parent. 
+ + for parent in ca.parents.all(): + + try: + + parent_pdu = parent_pdus.pop(parent.handle, None) + + if (parent_pdu is None or + parent_pdu.bsc_handle != bsc_handle or + parent_pdu.repository_handle != parent.handle or + parent_pdu.peer_contact_uri != parent.service_uri or + parent_pdu.sia_base != parent.repository.sia_base or + parent_pdu.sender_name != parent.child_handle or + parent_pdu.recipient_name != parent.parent_handle or + parent_pdu.bpki_cms_cert != parent.certificate): + rpkid_query.append(rpki.left_right.parent_elt.make_pdu( + action = "create" if parent_pdu is None else "set", + tag = parent.handle, + self_handle = ca.handle, + parent_handle = parent.handle, + bsc_handle = bsc_handle, + repository_handle = parent.handle, + peer_contact_uri = parent.service_uri, + sia_base = parent.repository.sia_base, + sender_name = parent.child_handle, + recipient_name = parent.parent_handle, + bpki_cms_cert = parent.certificate)) + + except rpki.irdb.Repository.DoesNotExist: + pass + + try: + + parent_pdu = parent_pdus.pop(ca.handle, None) + + if (parent_pdu is None or + parent_pdu.bsc_handle != bsc_handle or + parent_pdu.repository_handle != ca.handle or + parent_pdu.peer_contact_uri != ca.rootd.service_uri or + parent_pdu.sia_base != ca.rootd.repository.sia_base or + parent_pdu.sender_name != ca.handle or + parent_pdu.recipient_name != ca.handle or + parent_pdu.bpki_cms_cert != ca.rootd.certificate): + rpkid_query.append(rpki.left_right.parent_elt.make_pdu( + action = "create" if parent_pdu is None else "set", + tag = ca.handle, + self_handle = ca.handle, + parent_handle = ca.handle, + bsc_handle = bsc_handle, + repository_handle = ca.handle, + peer_contact_uri = ca.rootd.service_uri, + sia_base = ca.rootd.repository.sia_base, + sender_name = ca.handle, + recipient_name = ca.handle, + bpki_cms_cert = ca.rootd.certificate)) + + except rpki.irdb.Rootd.DoesNotExist: + pass + + rpkid_query.extend(rpki.left_right.parent_elt.make_pdu( + action = "destroy", self_handle = ca.handle, parent_handle = p) for p in parent_pdus) + + # Children are simpler than parents, because they call us, so no URL + # to construct and figuring out what certificate to use is their + # problem, not ours. + + for child in ca.children.all(): + + child_pdu = child_pdus.pop(child.handle, None) + + if (child_pdu is None or + child_pdu.bsc_handle != bsc_handle or + child_pdu.bpki_cert != child.certificate): + rpkid_query.append(rpki.left_right.child_elt.make_pdu( + action = "create" if child_pdu is None else "set", + tag = child.handle, + self_handle = ca.handle, + child_handle = child.handle, + bsc_handle = bsc_handle, + bpki_cert = child.certificate)) + + rpkid_query.extend(rpki.left_right.child_elt.make_pdu( + action = "destroy", self_handle = ca.handle, child_handle = c) for c in child_pdus) + + # If caller wants us to poke rpkid, add that to the very end of the message + + if poke: + rpkid_query.append(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = ca.handle, run_now = "yes")) + + # If we changed anything, ship updates off to rpkid + + if rpkid_query: + rpkid_reply = self.call_rpkid(rpkid_query) + bsc_pdus = dict((x.bsc_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.bsc_elt)) + if bsc_handle in bsc_pdus and bsc_pdus[bsc_handle].pkcs10_request: + bsc_req = bsc_pdus[bsc_handle].pkcs10_request + self.check_error_report(rpkid_reply) + + + def synchronize_pubd_core(self): + """ + Configure pubd with data built up by the other commands in this + program. 
This is the core synchronization code. Don't call this + directly, instead call a methods that calls this inside a Django + commit wrapper. + + This method configures pubd with data built up by the other + commands in this program. Commands which modify IRDB fields + related to pubd should call this when they're done. + """ + + # If we're not running pubd, the rest of this is a waste of time + + if not self.run_pubd: + return + + # Make sure that pubd's BPKI CRL is up to date. + + self.call_pubd(rpki.publication.config_elt.make_pdu( + action = "set", + bpki_crl = self.server_ca.latest_crl)) + + # See what pubd already has on file + + pubd_reply = self.call_pubd(rpki.publication.client_elt.make_pdu(action = "list")) + client_pdus = dict((x.client_handle, x) for x in pubd_reply if isinstance(x, rpki.publication.client_elt)) + pubd_query = [] + + # Check all clients + + for client in self.server_ca.clients.all(): + + client_pdu = client_pdus.pop(client.handle, None) + + if (client_pdu is None or + client_pdu.base_uri != client.sia_base or + client_pdu.bpki_cert != client.certificate): + pubd_query.append(rpki.publication.client_elt.make_pdu( + action = "create" if client_pdu is None else "set", + client_handle = client.handle, + bpki_cert = client.certificate, + base_uri = client.sia_base)) + + # Delete any unknown clients + + pubd_query.extend(rpki.publication.client_elt.make_pdu( + action = "destroy", client_handle = p) for p in client_pdus) + + # If we changed anything, ship updates off to pubd + + if pubd_query: + pubd_reply = self.call_pubd(pubd_query) + self.check_error_report(pubd_reply) + + + def synchronize_rpkid_deleted_core(self): + """ + Remove any <self/> objects present in rpkid's database but not + present in the IRDB. This is the core synchronization code. + Don't call this directly, instead call a methods that calls this + inside a Django commit wrapper. + """ + + rpkid_reply = self.call_rpkid(rpki.left_right.self_elt.make_pdu(action = "list")) + self.check_error_report(rpkid_reply) + + self_handles = set(s.self_handle for s in rpkid_reply) + ca_handles = set(ca.handle for ca in rpki.irdb.ResourceHolderCA.objects.all()) + assert ca_handles <= self_handles + + rpkid_query = [rpki.left_right.self_elt.make_pdu(action = "destroy", self_handle = handle) + for handle in (self_handles - ca_handles)] + + if rpkid_query: + rpkid_reply = self.call_rpkid(rpkid_query) + self.check_error_report(rpkid_reply) + + + @django.db.transaction.commit_on_success + def add_ee_certificate_request(self, pkcs10, resources): + """ + Check a PKCS #10 request to see if it complies with the + specification for a RPKI EE certificate; if it does, add an + EECertificateRequest for it to the IRDB. + + Not yet sure what we want for update and delete semantics here, so + for the moment this is straight addition. See methods like + .load_asns() and .load_prefixes() for other strategies. 
+ """ + + pkcs10.check_valid_request_ee() + ee_request = self.resource_ca.ee_certificate_requests.create( + pkcs10 = pkcs10, + gski = pkcs10.gSKI(), + valid_until = resources.valid_until) + for range in resources.asn: + ee_request.asns.create(start_as = str(range.min), end_as = str(range.max)) + for range in resources.v4: + ee_request.address_ranges.create(start_ip = str(range.min), end_ip = str(range.max), version = 4) + for range in resources.v6: + ee_request.address_ranges.create(start_ip = str(range.min), end_ip = str(range.max), version = 6) + + + @django.db.transaction.commit_on_success + def add_router_certificate_request(self, router_certificate_request_xml, valid_until = None): + """ + Read XML file containing one or more router certificate requests, + attempt to add request(s) to IRDB. + + Check each PKCS #10 request to see if it complies with the + specification for a router certificate; if it does, create an EE + certificate request for it along with the ASN resources and + router-ID supplied in the XML. + """ + + xml = ElementTree(file = router_certificate_request_xml).getroot() + rpki.relaxng.router_certificate.assertValid(xml) + + for req in xml.getiterator(routercert_namespaceQName + "router_certificate_request"): + + pkcs10 = rpki.x509.PKCS10(Base64 = req.text) + router_id = long(req.get("router_id")) + asns = rpki.resource_set.resource_set_as(req.get("asn")) + if not valid_until: + valid_until = req.get("valid_until") + + if valid_until and isinstance(valid_until, (str, unicode)): + valid_until = rpki.sundial.datetime.fromXMLtime(valid_until) + + if not valid_until: + valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 365) + elif valid_until < rpki.sundial.now(): + raise PastExpiration, "Specified expiration date %s has already passed" % valid_until + + pkcs10.check_valid_request_router() + + cn = "ROUTER-%08x" % asns[0].min + sn = "%08x" % router_id + + ee_request = self.resource_ca.ee_certificate_requests.create( + pkcs10 = pkcs10, + gski = pkcs10.gSKI(), + valid_until = valid_until, + cn = cn, + sn = sn, + eku = rpki.oids.id_kp_bgpsec_router) + + for range in asns: + ee_request.asns.create(start_as = str(range.min), end_as = str(range.max)) + + + @django.db.transaction.commit_on_success + def delete_router_certificate_request(self, gski): + """ + Delete a router certificate request from this RPKI entity. + """ + + self.resource_ca.ee_certificate_requests.get(gski = gski).delete() diff --git a/rpki/irdbd.py b/rpki/irdbd.py new file mode 100644 index 00000000..41739dc4 --- /dev/null +++ b/rpki/irdbd.py @@ -0,0 +1,266 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +IR database daemon. +""" + +import sys +import os +import time +import argparse +import urlparse +import rpki.http +import rpki.config +import rpki.resource_set +import rpki.relaxng +import rpki.exceptions +import rpki.left_right +import rpki.log +import rpki.x509 +import rpki.daemonize + +class main(object): + + def handle_list_resources(self, q_pdu, r_msg): + child = rpki.irdb.Child.objects.get( + issuer__handle__exact = q_pdu.self_handle, + handle = q_pdu.child_handle) + resources = child.resource_bag + r_pdu = rpki.left_right.list_resources_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.child_handle = q_pdu.child_handle + r_pdu.valid_until = child.valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") + r_pdu.asn = resources.asn + r_pdu.ipv4 = resources.v4 + r_pdu.ipv6 = resources.v6 + r_msg.append(r_pdu) + + def handle_list_roa_requests(self, q_pdu, r_msg): + for request in rpki.irdb.ROARequest.objects.raw(""" + SELECT irdb_roarequest.* + FROM irdb_roarequest, irdb_resourceholderca + WHERE irdb_roarequest.issuer_id = irdb_resourceholderca.id + AND irdb_resourceholderca.handle = %s + """, [q_pdu.self_handle]): + prefix_bag = request.roa_prefix_bag + r_pdu = rpki.left_right.list_roa_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.asn = request.asn + r_pdu.ipv4 = prefix_bag.v4 + r_pdu.ipv6 = prefix_bag.v6 + r_msg.append(r_pdu) + + def handle_list_ghostbuster_requests(self, q_pdu, r_msg): + ghostbusters = rpki.irdb.GhostbusterRequest.objects.filter( + issuer__handle__exact = q_pdu.self_handle, + parent__handle__exact = q_pdu.parent_handle) + if ghostbusters.count() == 0: + ghostbusters = rpki.irdb.GhostbusterRequest.objects.filter( + issuer__handle__exact = q_pdu.self_handle, + parent = None) + for ghostbuster in ghostbusters: + r_pdu = rpki.left_right.list_ghostbuster_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.parent_handle = q_pdu.parent_handle + r_pdu.vcard = ghostbuster.vcard + r_msg.append(r_pdu) + + def handle_list_ee_certificate_requests(self, q_pdu, r_msg): + for ee_req in rpki.irdb.EECertificateRequest.objects.filter(issuer__handle__exact = q_pdu.self_handle): + resources = ee_req.resource_bag + r_pdu = rpki.left_right.list_ee_certificate_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.gski = ee_req.gski + r_pdu.valid_until = ee_req.valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") + r_pdu.asn = resources.asn + r_pdu.ipv4 = resources.v4 + r_pdu.ipv6 = resources.v6 + r_pdu.cn = ee_req.cn + r_pdu.sn = ee_req.sn + r_pdu.eku = ee_req.eku + r_pdu.pkcs10 = ee_req.pkcs10 + r_msg.append(r_pdu) + + def handler(self, query, path, cb): + try: + q_pdu = None + r_msg = rpki.left_right.msg.reply() + from django.db import connection + connection.cursor() # Reconnect to mysqld if necessary + self.start_new_transaction() + serverCA = rpki.irdb.ServerCA.objects.get() + rpkid = serverCA.ee_certificates.get(purpose = "rpkid") + try: + q_cms = rpki.left_right.cms_msg(DER = query) + q_msg = q_cms.unwrap((serverCA.certificate, rpkid.certificate)) + self.cms_timestamp = q_cms.check_replay(self.cms_timestamp, path) + if not 
isinstance(q_msg, rpki.left_right.msg) or not q_msg.is_query(): + raise rpki.exceptions.BadQuery("Unexpected %r PDU" % q_msg) + for q_pdu in q_msg: + self.dispatch(q_pdu, r_msg) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + if q_pdu is None: + r_msg.append(rpki.left_right.report_error_elt.from_exception(e)) + else: + r_msg.append(rpki.left_right.report_error_elt.from_exception(e, q_pdu.self_handle, q_pdu.tag)) + irdbd = serverCA.ee_certificates.get(purpose = "irdbd") + cb(200, body = rpki.left_right.cms_msg().wrap(r_msg, irdbd.private_key, irdbd.certificate)) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) + + def dispatch(self, q_pdu, r_msg): + try: + handler = self.dispatch_vector[type(q_pdu)] + except KeyError: + raise rpki.exceptions.BadQuery("Unexpected %r PDU" % q_pdu) + else: + handler(q_pdu, r_msg) + + def __init__(self, **kwargs): + + global rpki # pylint: disable=W0602 + + os.environ["TZ"] = "UTC" + time.tzset() + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "override default location of configuration file") + parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging mode") + parser.add_argument("-f", "--foreground", action = "store_true", + help = "do not daemonize") + parser.add_argument("--pidfile", + help = "override default location of pid file") + parser.add_argument("--profile", + help = "enable profiling, saving data to PROFILE") + args = parser.parse_args() + + rpki.log.init("irdbd", use_syslog = not args.debug) + + self.cfg = rpki.config.parser(args.config, "irdbd") + self.cfg.set_global_flags() + + if not args.foreground and not args.debug: + rpki.daemonize.daemon(pidfile = args.pidfile) + + if args.profile: + import cProfile + prof = cProfile.Profile() + try: + prof.runcall(self.main) + finally: + prof.dump_stats(args.profile) + rpki.log.info("Dumped profile data to %s" % args.profile) + else: + self.main() + + def main(self): + + global rpki # pylint: disable=W0602 + from django.conf import settings + + startup_msg = self.cfg.get("startup-message", "") + if startup_msg: + rpki.log.info(startup_msg) + + # Do -not- turn on DEBUG here except for short-lived tests, + # otherwise irdbd will eventually run out of memory and crash. + # + # If you must enable debugging, use django.db.reset_queries() to + # clear the query list manually, but it's probably better just to + # run with debugging disabled, since that's the expectation for + # production code. + # + # https://docs.djangoproject.com/en/dev/faq/models/#why-is-django-leaking-memory + + settings.configure( + DATABASES = { + "default" : { + "ENGINE" : "django.db.backends.mysql", + "NAME" : self.cfg.get("sql-database"), + "USER" : self.cfg.get("sql-username"), + "PASSWORD" : self.cfg.get("sql-password"), + "HOST" : "", + "PORT" : "" }}, + INSTALLED_APPS = ("rpki.irdb",),) + + import rpki.irdb # pylint: disable=W0621 + + # Entirely too much fun with read-only access to transactional databases. 
+ # + # http://stackoverflow.com/questions/3346124/how-do-i-force-django-to-ignore-any-caches-and-reload-data + # http://devblog.resolversystems.com/?p=439 + # http://groups.google.com/group/django-users/browse_thread/thread/e25cec400598c06d + # http://stackoverflow.com/questions/1028671/python-mysqldb-update-query-fails + # http://dev.mysql.com/doc/refman/5.0/en/set-transaction.html + # + # It turns out that MySQL is doing us a favor with this weird + # transactional behavior on read, because without it there's a + # race condition if multiple updates are committed to the IRDB + # while we're in the middle of processing a query. Note that + # proper transaction management by the committers doesn't protect + # us, this is a transactional problem on read. So we need to use + # explicit transaction management. Since irdbd is a read-only + # consumer of IRDB data, this means we need to commit an empty + # transaction at the beginning of processing each query, to reset + # the transaction isolation snapshot. + + import django.db.transaction + self.start_new_transaction = django.db.transaction.commit_manually(django.db.transaction.commit) + + self.dispatch_vector = { + rpki.left_right.list_resources_elt : self.handle_list_resources, + rpki.left_right.list_roa_requests_elt : self.handle_list_roa_requests, + rpki.left_right.list_ghostbuster_requests_elt : self.handle_list_ghostbuster_requests, + rpki.left_right.list_ee_certificate_requests_elt : self.handle_list_ee_certificate_requests} + + try: + self.http_server_host = self.cfg.get("server-host", "") + self.http_server_port = self.cfg.getint("server-port") + except: + # + # Backwards compatibility, remove this eventually. + # + u = urlparse.urlparse(self.cfg.get("http-url")) + if (u.scheme not in ("", "http") or + u.username is not None or + u.password is not None or + u.params or u.query or u.fragment): + raise + self.http_server_host = u.hostname + self.http_server_port = int(u.port) + + self.cms_timestamp = None + + rpki.http.server( + host = self.http_server_host, + port = self.http_server_port, + handlers = self.handler) diff --git a/rpki/left_right.py b/rpki/left_right.py new file mode 100644 index 00000000..2d46cdfa --- /dev/null +++ b/rpki/left_right.py @@ -0,0 +1,1300 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +RPKI "left-right" protocol. 
+""" + +import rpki.resource_set +import rpki.x509 +import rpki.sql +import rpki.exceptions +import rpki.xml_utils +import rpki.http +import rpki.up_down +import rpki.relaxng +import rpki.sundial +import rpki.log +import rpki.publication +import rpki.async +import rpki.rpkid_tasks + +## @var enforce_strict_up_down_xml_sender +# Enforce strict checking of XML "sender" field in up-down protocol + +enforce_strict_up_down_xml_sender = False + +class left_right_namespace(object): + """ + XML namespace parameters for left-right protocol. + """ + + xmlns = "http://www.hactrn.net/uris/rpki/left-right-spec/" + nsmap = { None : xmlns } + +class data_elt(rpki.xml_utils.data_elt, rpki.sql.sql_persistent, left_right_namespace): + """ + Virtual class for top-level left-right protocol data elements. + """ + + handles = () + + self_id = None + self_handle = None + + @property + @rpki.sql.cache_reference + def self(self): + """ + Fetch self object to which this object links. + """ + return self_elt.sql_fetch(self.gctx, self.self_id) + + @property + @rpki.sql.cache_reference + def bsc(self): + """ + Return BSC object to which this object links. + """ + return bsc_elt.sql_fetch(self.gctx, self.bsc_id) + + def make_reply_clone_hook(self, r_pdu): + """ + Set handles when cloning, including _id -> _handle translation. + """ + if r_pdu.self_handle is None: + r_pdu.self_handle = self.self_handle + for tag, elt in self.handles: + id_name = tag + "_id" + handle_name = tag + "_handle" + if getattr(r_pdu, handle_name, None) is None: + try: + setattr(r_pdu, handle_name, getattr(elt.sql_fetch(self.gctx, getattr(r_pdu, id_name)), handle_name)) + except AttributeError: + continue + + @classmethod + def serve_fetch_handle(cls, gctx, self_id, handle): + """ + Find an object based on its handle. + """ + return cls.sql_fetch_where1(gctx, cls.element_name + "_handle = %s AND self_id = %s", (handle, self_id)) + + def serve_fetch_one_maybe(self): + """ + Find the object on which a get, set, or destroy method should + operate, or which would conflict with a create method. + """ + where = "%s.%s_handle = %%s AND %s.self_id = self.self_id AND self.self_handle = %%s" % ((self.element_name,) * 3) + args = (getattr(self, self.element_name + "_handle"), self.self_handle) + return self.sql_fetch_where1(self.gctx, where, args, "self") + + def serve_fetch_all(self): + """ + Find the objects on which a list method should operate. + """ + where = "%s.self_id = self.self_id and self.self_handle = %%s" % self.element_name + return self.sql_fetch_where(self.gctx, where, (self.self_handle,), "self") + + def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Hook to do _handle => _id translation before saving. + + self is always the object to be saved to SQL. For create + operations, self and q_pdu are be the same object; for set + operations, self is the pre-existing object from SQL and q_pdu is + the set request received from the the IRBE. + """ + for tag, elt in self.handles: + id_name = tag + "_id" + if getattr(self, id_name, None) is None: + x = elt.serve_fetch_handle(self.gctx, self.self_id, getattr(q_pdu, tag + "_handle")) + if x is None: + raise rpki.exceptions.HandleTranslationError, "Could not translate %r %s_handle" % (self, tag) + setattr(self, id_name, getattr(x, id_name)) + cb() + +class self_elt(data_elt): + """ + <self/> element. 
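+
+  This is the top-level configuration object for one resource-holding
+  entity; the BSC, repository, parent, child, ROA, Ghostbuster, and EE
+  certificate objects below all hang off a <self/>.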
+ """ + + element_name = "self" + attributes = ("action", "tag", "self_handle", "crl_interval", "regen_margin") + elements = ("bpki_cert", "bpki_glue") + booleans = ("rekey", "reissue", "revoke", "run_now", "publish_world_now", "revoke_forgotten", + "clear_replay_protection") + + sql_template = rpki.sql.template( + "self", + "self_id", + "self_handle", + "use_hsm", + "crl_interval", + "regen_margin", + ("bpki_cert", rpki.x509.X509), + ("bpki_glue", rpki.x509.X509)) + + handles = () + + use_hsm = False + crl_interval = None + regen_margin = None + bpki_cert = None + bpki_glue = None + cron_tasks = None + + def __repr__(self): + return rpki.log.log_repr(self) + + @property + def bscs(self): + """ + Fetch all BSC objects that link to this self object. + """ + return bsc_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + @property + def repositories(self): + """ + Fetch all repository objects that link to this self object. + """ + return repository_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + @property + def parents(self): + """ + Fetch all parent objects that link to this self object. + """ + return parent_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + @property + def children(self): + """ + Fetch all child objects that link to this self object. + """ + return child_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + @property + def roas(self): + """ + Fetch all ROA objects that link to this self object. + """ + return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + @property + def ghostbusters(self): + """ + Fetch all Ghostbuster record objects that link to this self object. + """ + return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + @property + def ee_certificates(self): + """ + Fetch all EE certificate objects that link to this self object. + """ + return rpki.rpkid.ee_cert_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for self_elt. + """ + rpki.log.trace() + actions = [] + if q_pdu.rekey: + actions.append(self.serve_rekey) + if q_pdu.revoke: + actions.append(self.serve_revoke) + if q_pdu.reissue: + actions.append(self.serve_reissue) + if q_pdu.revoke_forgotten: + actions.append(self.serve_revoke_forgotten) + if q_pdu.publish_world_now: + actions.append(self.serve_publish_world_now) + if q_pdu.run_now: + actions.append(self.serve_run_now) + if q_pdu.clear_replay_protection: + actions.append(self.serve_clear_replay_protection) + def loop(iterator, action): + action(iterator, eb) + rpki.async.iterator(actions, loop, cb) + + def serve_rekey(self, cb, eb): + """ + Handle a left-right rekey action for this self. + """ + rpki.log.trace() + def loop(iterator, parent): + parent.serve_rekey(iterator, eb) + rpki.async.iterator(self.parents, loop, cb) + + def serve_revoke(self, cb, eb): + """ + Handle a left-right revoke action for this self. + """ + rpki.log.trace() + def loop(iterator, parent): + parent.serve_revoke(iterator, eb) + rpki.async.iterator(self.parents, loop, cb) + + def serve_reissue(self, cb, eb): + """ + Handle a left-right reissue action for this self. + """ + rpki.log.trace() + def loop(iterator, parent): + parent.serve_reissue(iterator, eb) + rpki.async.iterator(self.parents, loop, cb) + + def serve_revoke_forgotten(self, cb, eb): + """ + Handle a left-right revoke_forgotten action for this self. 
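+
+    This simply fans the request out to each of this self's parents;
+    parent_elt.serve_revoke_forgotten() does the real work.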
+ """ + rpki.log.trace() + def loop(iterator, parent): + parent.serve_revoke_forgotten(iterator, eb) + rpki.async.iterator(self.parents, loop, cb) + + def serve_clear_replay_protection(self, cb, eb): + """ + Handle a left-right clear_replay_protection action for this self. + """ + rpki.log.trace() + def loop(iterator, obj): + obj.serve_clear_replay_protection(iterator, eb) + rpki.async.iterator(self.parents + self.children + self.repositories, loop, cb) + + def serve_destroy_hook(self, cb, eb): + """ + Extra cleanup actions when destroying a self_elt. + """ + rpki.log.trace() + def loop(iterator, parent): + parent.delete(iterator) + rpki.async.iterator(self.parents, loop, cb) + + + def serve_publish_world_now(self, cb, eb): + """ + Handle a left-right publish_world_now action for this self. + + The publication stuff needs refactoring, right now publication is + interleaved with local operations in a way that forces far too + many bounces through the task system for any complex update. The + whole thing ought to be rewritten to queue up outgoing publication + PDUs and only send them when we're all done or when we need to + force publication at a particular point in a multi-phase operation. + + Once that reorganization has been done, this method should be + rewritten to reuse the low-level publish() methods that each + object will have...but we're not there yet. So, for now, we just + do this via brute force. Think of it as a trial version to see + whether we've identified everything that needs to be republished + for this operation. + """ + + def loop(iterator, parent): + q_msg = rpki.publication.msg.query() + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None: + q_msg.append(rpki.publication.crl_elt.make_publish( + ca_detail.crl_uri, ca_detail.latest_crl)) + q_msg.append(rpki.publication.manifest_elt.make_publish( + ca_detail.manifest_uri, ca_detail.latest_manifest)) + q_msg.extend(rpki.publication.certificate_elt.make_publish( + c.uri, c.cert) for c in ca_detail.child_certs) + q_msg.extend(rpki.publication.roa_elt.make_publish( + r.uri, r.roa) for r in ca_detail.roas if r.roa is not None) + q_msg.extend(rpki.publication.ghostbuster_elt.make_publish( + g.uri, g.ghostbuster) for g in ca_detail.ghostbusters) + parent.repository.call_pubd(iterator, eb, q_msg) + + rpki.async.iterator(self.parents, loop, cb) + + def serve_run_now(self, cb, eb): + """ + Handle a left-right run_now action for this self. + """ + rpki.log.debug("Forced immediate run of periodic actions for self %s[%d]" % ( + self.self_handle, self.self_id)) + completion = rpki.rpkid_tasks.CompletionHandler(cb) + self.schedule_cron_tasks(completion) + assert completion.count > 0 + self.gctx.task_run() + + def serve_fetch_one_maybe(self): + """ + Find the self object upon which a get, set, or destroy action + should operate, or which would conflict with a create method. + """ + return self.serve_fetch_handle(self.gctx, None, self.self_handle) + + @classmethod + def serve_fetch_handle(cls, gctx, self_id, self_handle): + """ + Find a self object based on its self_handle. + """ + return cls.sql_fetch_where1(gctx, "self_handle = %s", self_handle) + + def serve_fetch_all(self): + """ + Find the self objects upon which a list action should operate. + This is different from the list action for all other objects, + where list only works within a given self_id context. + """ + return self.sql_fetch_all(self.gctx) + + def schedule_cron_tasks(self, completion): + """ + Schedule periodic tasks. 
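+
+    Task objects are created lazily, one per class listed in
+    rpki.rpkid_tasks.task_classes; each is added to the global task
+    queue and registered with the supplied completion handler.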
+ """ + + if self.cron_tasks is None: + self.cron_tasks = tuple(task(self) for task in rpki.rpkid_tasks.task_classes) + + for task in self.cron_tasks: + self.gctx.task_add(task) + completion.register(task) + + def find_covering_ca_details(self, resources): + """ + Return all active ca_detail_objs for this <self/> which cover a + particular set of resources. + + If we expected there to be a large number of ca_detail_objs, we + could add index tables and write fancy SQL query to do this, but + for the expected common case where there are only one or two + active ca_detail_objs per <self/>, it's probably not worth it. In + any case, this is an optimization we can leave for later. + """ + + results = set() + for parent in self.parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None and ca_detail.covers(resources): + results.add(ca_detail) + return results + + +class bsc_elt(data_elt): + """ + <bsc/> (Business Signing Context) element. + """ + + element_name = "bsc" + attributes = ("action", "tag", "self_handle", "bsc_handle", "key_type", "hash_alg", "key_length") + elements = ("signing_cert", "signing_cert_crl", "pkcs10_request") + booleans = ("generate_keypair",) + + sql_template = rpki.sql.template( + "bsc", + "bsc_id", + "bsc_handle", + "self_id", + "hash_alg", + ("private_key_id", rpki.x509.RSA), + ("pkcs10_request", rpki.x509.PKCS10), + ("signing_cert", rpki.x509.X509), + ("signing_cert_crl", rpki.x509.CRL)) + + handles = (("self", self_elt),) + + private_key_id = None + pkcs10_request = None + signing_cert = None + signing_cert_crl = None + + def __repr__(self): + return rpki.log.log_repr(self, self.bsc_handle) + + @property + def repositories(self): + """ + Fetch all repository objects that link to this BSC object. + """ + return repository_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,)) + + @property + def parents(self): + """ + Fetch all parent objects that link to this BSC object. + """ + return parent_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,)) + + @property + def children(self): + """ + Fetch all child objects that link to this BSC object. + """ + return child_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,)) + + def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for bsc_elt -- handle key generation. For + now this only allows RSA with SHA-256. + """ + if q_pdu.generate_keypair: + assert q_pdu.key_type in (None, "rsa") and q_pdu.hash_alg in (None, "sha256") + self.private_key_id = rpki.x509.RSA.generate(keylength = q_pdu.key_length or 2048) + self.pkcs10_request = rpki.x509.PKCS10.create(keypair = self.private_key_id) + r_pdu.pkcs10_request = self.pkcs10_request + data_elt.serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb) + +class repository_elt(data_elt): + """ + <repository/> element. 
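+
+  Describes one publication repository which this <self/> can publish
+  through: the BSC used to sign publication requests, the
+  peer_contact_uri at which to reach pubd, and the BPKI certificates
+  used to validate pubd's CMS responses.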
+ """ + + element_name = "repository" + attributes = ("action", "tag", "self_handle", "repository_handle", "bsc_handle", "peer_contact_uri") + elements = ("bpki_cert", "bpki_glue") + booleans = ("clear_replay_protection",) + + sql_template = rpki.sql.template( + "repository", + "repository_id", + "repository_handle", + "self_id", + "bsc_id", + "peer_contact_uri", + ("bpki_cert", rpki.x509.X509), + ("bpki_glue", rpki.x509.X509), + ("last_cms_timestamp", rpki.sundial.datetime)) + + handles = (("self", self_elt), + ("bsc", bsc_elt)) + + bpki_cert = None + bpki_glue = None + last_cms_timestamp = None + + def __repr__(self): + return rpki.log.log_repr(self, self.repository_handle) + + @property + def parents(self): + """ + Fetch all parent objects that link to this repository object. + """ + return parent_elt.sql_fetch_where(self.gctx, "repository_id = %s", (self.repository_id,)) + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for repository_elt. + """ + actions = [] + if q_pdu.clear_replay_protection: + actions.append(self.serve_clear_replay_protection) + def loop(iterator, action): + action(iterator, eb) + rpki.async.iterator(actions, loop, cb) + + def serve_clear_replay_protection(self, cb, eb): + """ + Handle a left-right clear_replay_protection action for this repository. + """ + self.last_cms_timestamp = None + self.sql_mark_dirty() + cb() + + @staticmethod + def default_pubd_handler(pdu): + """ + Default handler for publication response PDUs. + """ + pdu.raise_if_error() + + def call_pubd(self, callback, errback, q_msg, handlers = None): + """ + Send a message to publication daemon and return the response. + + As a convenience, attempting to send an empty message returns + immediate success without sending anything. + + Handlers is a dict of handler functions to process the response + PDUs. If the tag value in the response PDU appears in the dict, + the associated handler is called to process the PDU. If no tag + matches, default_pubd_handler() is called. A handler value of + False suppresses calling of the default handler. + """ + + try: + rpki.log.trace() + + self.gctx.sql.sweep() + + if not q_msg: + return callback() + + if handlers is None: + handlers = {} + + for q_pdu in q_msg: + rpki.log.info("Sending %s %s to pubd" % (q_pdu.action, q_pdu.uri)) + + bsc = self.bsc + q_der = rpki.publication.cms_msg().wrap(q_msg, bsc.private_key_id, bsc.signing_cert, bsc.signing_cert_crl) + bpki_ta_path = (self.gctx.bpki_ta, self.self.bpki_cert, self.self.bpki_glue, self.bpki_cert, self.bpki_glue) + + def done(r_der): + try: + rpki.log.debug("Received response from pubd") + r_cms = rpki.publication.cms_msg(DER = r_der) + r_msg = r_cms.unwrap(bpki_ta_path) + r_cms.check_replay_sql(self, self.peer_contact_uri) + for r_pdu in r_msg: + handler = handlers.get(r_pdu.tag, self.default_pubd_handler) + if handler: + rpki.log.debug("Calling pubd handler %r" % handler) + handler(r_pdu) + if len(q_msg) != len(r_msg): + raise rpki.exceptions.BadPublicationReply, "Wrong number of response PDUs from pubd: sent %r, got %r" % (q_msg, r_msg) + callback() + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + errback(e) + + rpki.log.debug("Sending request to pubd") + rpki.http.client( + url = self.peer_contact_uri, + msg = q_der, + callback = done, + errback = errback) + + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + errback(e) + +class parent_elt(data_elt): + """ + <parent/> element. 
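+
+  Describes one parent from which this <self/> receives resource
+  certificates via the up-down protocol: the BSC used for signing,
+  the associated repository, the up-down peer_contact_uri, and the
+  sender and recipient names used in up-down messages.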
+ """ + + element_name = "parent" + attributes = ("action", "tag", "self_handle", "parent_handle", "bsc_handle", "repository_handle", + "peer_contact_uri", "sia_base", "sender_name", "recipient_name") + elements = ("bpki_cms_cert", "bpki_cms_glue") + booleans = ("rekey", "reissue", "revoke", "revoke_forgotten", "clear_replay_protection") + + sql_template = rpki.sql.template( + "parent", + "parent_id", + "parent_handle", + "self_id", + "bsc_id", + "repository_id", + "peer_contact_uri", + "sia_base", + "sender_name", + "recipient_name", + ("bpki_cms_cert", rpki.x509.X509), + ("bpki_cms_glue", rpki.x509.X509), + ("last_cms_timestamp", rpki.sundial.datetime)) + + handles = (("self", self_elt), + ("bsc", bsc_elt), + ("repository", repository_elt)) + + bpki_cms_cert = None + bpki_cms_glue = None + last_cms_timestamp = None + + def __repr__(self): + return rpki.log.log_repr(self, self.parent_handle) + + @property + @rpki.sql.cache_reference + def repository(self): + """ + Fetch repository object to which this parent object links. + """ + return repository_elt.sql_fetch(self.gctx, self.repository_id) + + @property + def cas(self): + """ + Fetch all CA objects that link to this parent object. + """ + return rpki.rpkid.ca_obj.sql_fetch_where(self.gctx, "parent_id = %s", (self.parent_id,)) + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for parent_elt. + """ + actions = [] + if q_pdu.rekey: + actions.append(self.serve_rekey) + if q_pdu.revoke: + actions.append(self.serve_revoke) + if q_pdu.reissue: + actions.append(self.serve_reissue) + if q_pdu.revoke_forgotten: + actions.append(self.serve_revoke_forgotten) + if q_pdu.clear_replay_protection: + actions.append(self.serve_clear_replay_protection) + def loop(iterator, action): + action(iterator, eb) + rpki.async.iterator(actions, loop, cb) + + def serve_rekey(self, cb, eb): + """ + Handle a left-right rekey action for this parent. + """ + def loop(iterator, ca): + ca.rekey(iterator, eb) + rpki.async.iterator(self.cas, loop, cb) + + def serve_revoke(self, cb, eb): + """ + Handle a left-right revoke action for this parent. + """ + def loop(iterator, ca): + ca.revoke(cb = iterator, eb = eb) + rpki.async.iterator(self.cas, loop, cb) + + def serve_reissue(self, cb, eb): + """ + Handle a left-right reissue action for this parent. + """ + def loop(iterator, ca): + ca.reissue(cb = iterator, eb = eb) + rpki.async.iterator(self.cas, loop, cb) + + def serve_clear_replay_protection(self, cb, eb): + """ + Handle a left-right clear_replay_protection action for this parent. + """ + self.last_cms_timestamp = None + self.sql_mark_dirty() + cb() + + + def get_skis(self, cb, eb): + """ + Fetch SKIs that this parent thinks we have. In theory this should + agree with our own database, but in practice stuff can happen, so + sometimes we need to know what our parent thinks. + + Result is a dictionary with the resource class name as key and a + set of SKIs as value. + """ + + def done(r_msg): + cb(dict((rc.class_name, set(c.cert.gSKI() for c in rc.certs)) + for rc in r_msg.payload.classes)) + + rpki.up_down.list_pdu.query(self, done, eb) + + + def revoke_skis(self, rc_name, skis_to_revoke, cb, eb): + """ + Revoke a set of SKIs within a particular resource class. 
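+
+    Used by serve_revoke_forgotten() (below) to ask this parent to
+    revoke certificates which the parent still lists for us but which
+    we no longer hold locally.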
+ """ + + def loop(iterator, ski): + rpki.log.debug("Asking parent %r to revoke class %r, SKI %s" % (self, rc_name, ski)) + q_pdu = rpki.up_down.revoke_pdu() + q_pdu.class_name = rc_name + q_pdu.ski = ski + self.query_up_down(q_pdu, lambda r_pdu: iterator(), eb) + + rpki.async.iterator(skis_to_revoke, loop, cb) + + + def serve_revoke_forgotten(self, cb, eb): + """ + Handle a left-right revoke_forgotten action for this parent. + + This is a bit fiddly: we have to compare the result of an up-down + list query with what we have locally and identify the SKIs of any + certificates that have gone missing. This should never happen in + ordinary operation, but can arise if we have somehow lost a + private key, in which case there is nothing more we can do with + the issued cert, so we have to clear it. As this really is not + supposed to happen, we don't clear it automatically, instead we + require an explicit trigger. + """ + + def got_skis(skis_from_parent): + + def loop(iterator, item): + rc_name, skis_to_revoke = item + if rc_name in ca_map: + for ca_detail in ca_map[rc_name].issue_response_candidate_ca_details: + skis_to_revoke.discard(ca_detail.latest_ca_cert.gSKI()) + self.revoke_skis(rc_name, skis_to_revoke, iterator, eb) + + ca_map = dict((ca.parent_resource_class, ca) for ca in self.cas) + rpki.async.iterator(skis_from_parent.items(), loop, cb) + + self.get_skis(got_skis, eb) + + + def delete(self, cb, delete_parent = True): + """ + Delete all the CA stuff under this parent, and perhaps the parent + itself. + """ + + def loop(iterator, ca): + self.gctx.checkpoint() + ca.delete(self, iterator) + + def revoke(): + self.gctx.checkpoint() + self.serve_revoke_forgotten(done, fail) + + def fail(e): + rpki.log.warn("Trouble getting parent to revoke certificates, blundering onwards: %s" % e) + done() + + def done(): + self.gctx.checkpoint() + self.gctx.sql.sweep() + if delete_parent: + self.sql_delete() + cb() + + rpki.async.iterator(self.cas, loop, revoke) + + + def serve_destroy_hook(self, cb, eb): + """ + Extra server actions when destroying a parent_elt. + """ + + self.delete(cb, delete_parent = False) + + + def query_up_down(self, q_pdu, cb, eb): + """ + Client code for sending one up-down query PDU to this parent. + """ + + rpki.log.trace() + + bsc = self.bsc + if bsc is None: + raise rpki.exceptions.BSCNotFound, "Could not find BSC %s" % self.bsc_id + + if bsc.signing_cert is None: + raise rpki.exceptions.BSCNotReady, "BSC %r[%s] is not yet usable" % (bsc.bsc_handle, bsc.bsc_id) + + q_msg = rpki.up_down.message_pdu.make_query( + payload = q_pdu, + sender = self.sender_name, + recipient = self.recipient_name) + + q_der = rpki.up_down.cms_msg().wrap(q_msg, bsc.private_key_id, + bsc.signing_cert, + bsc.signing_cert_crl) + + def unwrap(r_der): + try: + r_cms = rpki.up_down.cms_msg(DER = r_der) + r_msg = r_cms.unwrap((self.gctx.bpki_ta, + self.self.bpki_cert, + self.self.bpki_glue, + self.bpki_cms_cert, + self.bpki_cms_glue)) + r_cms.check_replay_sql(self, self.peer_contact_uri) + r_msg.payload.check_response() + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + eb(e) + else: + cb(r_msg) + + rpki.http.client( + msg = q_der, + url = self.peer_contact_uri, + callback = unwrap, + errback = eb) + +class child_elt(data_elt): + """ + <child/> element. 
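+
+  Describes one child to which this <self/> issues certificates via
+  the up-down protocol: the BSC used to sign up-down responses and the
+  BPKI certificate used to validate the child's CMS messages.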
+ """ + + element_name = "child" + attributes = ("action", "tag", "self_handle", "child_handle", "bsc_handle") + elements = ("bpki_cert", "bpki_glue") + booleans = ("reissue", "clear_replay_protection") + + sql_template = rpki.sql.template( + "child", + "child_id", + "child_handle", + "self_id", + "bsc_id", + ("bpki_cert", rpki.x509.X509), + ("bpki_glue", rpki.x509.X509), + ("last_cms_timestamp", rpki.sundial.datetime)) + + handles = (("self", self_elt), + ("bsc", bsc_elt)) + + bpki_cert = None + bpki_glue = None + last_cms_timestamp = None + + def __repr__(self): + return rpki.log.log_repr(self, self.child_handle) + + def fetch_child_certs(self, ca_detail = None, ski = None, unique = False): + """ + Fetch all child_cert objects that link to this child object. + """ + return rpki.rpkid.child_cert_obj.fetch(self.gctx, self, ca_detail, ski, unique) + + @property + def child_certs(self): + """ + Fetch all child_cert objects that link to this child object. + """ + return self.fetch_child_certs() + + @property + def parents(self): + """ + Fetch all parent objects that link to self object to which this child object links. + """ + return parent_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for child_elt. + """ + actions = [] + if q_pdu.reissue: + actions.append(self.serve_reissue) + if q_pdu.clear_replay_protection: + actions.append(self.serve_clear_replay_protection) + def loop(iterator, action): + action(iterator, eb) + rpki.async.iterator(actions, loop, cb) + + def serve_reissue(self, cb, eb): + """ + Handle a left-right reissue action for this child. + """ + publisher = rpki.rpkid.publication_queue() + for child_cert in self.child_certs: + child_cert.reissue(child_cert.ca_detail, publisher, force = True) + publisher.call_pubd(cb, eb) + + def serve_clear_replay_protection(self, cb, eb): + """ + Handle a left-right clear_replay_protection action for this child. + """ + self.last_cms_timestamp = None + self.sql_mark_dirty() + cb() + + def ca_from_class_name(self, class_name): + """ + Fetch the CA corresponding to an up-down class_name. + """ + if not class_name.isdigit(): + raise rpki.exceptions.BadClassNameSyntax, "Bad class name %s" % class_name + ca = rpki.rpkid.ca_obj.sql_fetch(self.gctx, long(class_name)) + if ca is None: + raise rpki.exceptions.ClassNameUnknown, "Unknown class name %s" % class_name + parent = ca.parent + if self.self_id != parent.self_id: + raise rpki.exceptions.ClassNameMismatch( + "Class name mismatch: child.self_id = %d, parent.self_id = %d" % ( + self.self_id, parent.self_id)) + return ca + + def serve_destroy_hook(self, cb, eb): + """ + Extra server actions when destroying a child_elt. + """ + publisher = rpki.rpkid.publication_queue() + for child_cert in self.child_certs: + child_cert.revoke(publisher = publisher, + generate_crl_and_manifest = True) + publisher.call_pubd(cb, eb) + + def serve_up_down(self, query, callback): + """ + Outer layer of server handling for one up-down PDU from this child. 
+ """ + + rpki.log.trace() + + bsc = self.bsc + if bsc is None: + raise rpki.exceptions.BSCNotFound, "Could not find BSC %s" % self.bsc_id + q_cms = rpki.up_down.cms_msg(DER = query) + q_msg = q_cms.unwrap((self.gctx.bpki_ta, + self.self.bpki_cert, + self.self.bpki_glue, + self.bpki_cert, + self.bpki_glue)) + q_cms.check_replay_sql(self, "child", self.child_handle) + q_msg.payload.gctx = self.gctx + if enforce_strict_up_down_xml_sender and q_msg.sender != self.child_handle: + raise rpki.exceptions.BadSender, "Unexpected XML sender %s" % q_msg.sender + self.gctx.sql.sweep() + + def done(r_msg): + # + # Exceptions from this point on are problematic, as we have no + # sane way of reporting errors in the error reporting mechanism. + # May require refactoring, ignore the issue for now. + # + reply = rpki.up_down.cms_msg().wrap(r_msg, bsc.private_key_id, + bsc.signing_cert, bsc.signing_cert_crl) + callback(reply) + + try: + q_msg.serve_top_level(self, done) + except (rpki.async.ExitNow, SystemExit): + raise + except rpki.exceptions.NoActiveCA, data: + done(q_msg.serve_error(data)) + except Exception, e: + rpki.log.traceback() + done(q_msg.serve_error(e)) + +class list_resources_elt(rpki.xml_utils.base_elt, left_right_namespace): + """ + <list_resources/> element. + """ + + element_name = "list_resources" + attributes = ("self_handle", "tag", "child_handle", "valid_until", "asn", "ipv4", "ipv6") + valid_until = None + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.child_handle, self.asn, self.ipv4, self.ipv6) + + def startElement(self, stack, name, attrs): + """ + Handle <list_resources/> element. This requires special handling + due to the data types of some of the attributes. + """ + assert name == "list_resources", "Unexpected name %s, stack %s" % (name, stack) + self.read_attrs(attrs) + if isinstance(self.valid_until, str): + self.valid_until = rpki.sundial.datetime.fromXMLtime(self.valid_until) + if self.asn is not None: + self.asn = rpki.resource_set.resource_set_as(self.asn) + if self.ipv4 is not None: + self.ipv4 = rpki.resource_set.resource_set_ipv4(self.ipv4) + if self.ipv6 is not None: + self.ipv6 = rpki.resource_set.resource_set_ipv6(self.ipv6) + + def toXML(self): + """ + Generate <list_resources/> element. This requires special + handling due to the data types of some of the attributes. + """ + elt = self.make_elt() + if isinstance(self.valid_until, int): + elt.set("valid_until", self.valid_until.toXMLtime()) + return elt + +class list_roa_requests_elt(rpki.xml_utils.base_elt, left_right_namespace): + """ + <list_roa_requests/> element. + """ + + element_name = "list_roa_requests" + attributes = ("self_handle", "tag", "asn", "ipv4", "ipv6") + + def startElement(self, stack, name, attrs): + """ + Handle <list_roa_requests/> element. This requires special handling + due to the data types of some of the attributes. + """ + assert name == "list_roa_requests", "Unexpected name %s, stack %s" % (name, stack) + self.read_attrs(attrs) + if self.ipv4 is not None: + self.ipv4 = rpki.resource_set.roa_prefix_set_ipv4(self.ipv4) + if self.ipv6 is not None: + self.ipv6 = rpki.resource_set.roa_prefix_set_ipv6(self.ipv6) + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.asn, self.ipv4, self.ipv6) + +class list_ghostbuster_requests_elt(rpki.xml_utils.text_elt, left_right_namespace): + """ + <list_ghostbuster_requests/> element. 
+ """ + + element_name = "list_ghostbuster_requests" + attributes = ("self_handle", "tag", "parent_handle") + text_attribute = "vcard" + + vcard = None + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.parent_handle) + +class list_ee_certificate_requests_elt(rpki.xml_utils.base_elt, left_right_namespace): + """ + <list_ee_certificate_requests/> element. + """ + + element_name = "list_ee_certificate_requests" + attributes = ("self_handle", "tag", "gski", "valid_until", "asn", "ipv4", "ipv6", "cn", "sn", "eku") + elements = ("pkcs10",) + + pkcs10 = None + valid_until = None + eku = None + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.gski, self.cn, self.sn, self.asn, self.ipv4, self.ipv6) + + def startElement(self, stack, name, attrs): + """ + Handle <list_ee_certificate_requests/> element. This requires special + handling due to the data types of some of the attributes. + """ + if name not in self.elements: + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + self.read_attrs(attrs) + if isinstance(self.valid_until, str): + self.valid_until = rpki.sundial.datetime.fromXMLtime(self.valid_until) + if self.asn is not None: + self.asn = rpki.resource_set.resource_set_as(self.asn) + if self.ipv4 is not None: + self.ipv4 = rpki.resource_set.resource_set_ipv4(self.ipv4) + if self.ipv6 is not None: + self.ipv6 = rpki.resource_set.resource_set_ipv6(self.ipv6) + if self.eku is not None: + self.eku = self.eku.split(",") + + def endElement(self, stack, name, text): + """ + Handle <pkcs10/> sub-element. + """ + assert len(self.elements) == 1 + if name == self.elements[0]: + self.pkcs10 = rpki.x509.PKCS10(Base64 = text) + else: + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + stack.pop() + + def toXML(self): + """ + Generate <list_ee_certificate_requests/> element. This requires special + handling due to the data types of some of the attributes. + """ + if isinstance(self.eku, (tuple, list)): + self.eku = ",".join(self.eku) + elt = self.make_elt() + for i in self.elements: + self.make_b64elt(elt, i, getattr(self, i, None)) + if isinstance(self.valid_until, int): + elt.set("valid_until", self.valid_until.toXMLtime()) + return elt + +class list_published_objects_elt(rpki.xml_utils.text_elt, left_right_namespace): + """ + <list_published_objects/> element. + """ + + element_name = "list_published_objects" + attributes = ("self_handle", "tag", "uri", "child_handle") + text_attribute = "obj" + + obj = None + child_handle = None + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.child_handle, self.uri) + + def serve_dispatch(self, r_msg, cb, eb): + """ + Handle a <list_published_objects/> query. The method name is a + misnomer here, there's no action attribute and no dispatch, we + just dump every published object for the specified <self/> and return. 
+ """ + for parent in self_elt.serve_fetch_handle(self.gctx, None, self.self_handle).parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None: + r_msg.append(self.make_reply(ca_detail.crl_uri, ca_detail.latest_crl)) + r_msg.append(self.make_reply(ca_detail.manifest_uri, ca_detail.latest_manifest)) + r_msg.extend(self.make_reply(c.uri, c.cert, c.child.child_handle) + for c in ca_detail.child_certs) + r_msg.extend(self.make_reply(r.uri, r.roa) + for r in ca_detail.roas if r.roa is not None) + r_msg.extend(self.make_reply(g.uri, g.ghostbuster) + for g in ca_detail.ghostbusters) + r_msg.extend(self.make_reply(c.uri, c.cert) + for c in ca_detail.ee_certificates) + cb() + + def make_reply(self, uri, obj, child_handle = None): + """ + Generate one reply PDU. + """ + r_pdu = self.make_pdu(tag = self.tag, self_handle = self.self_handle, + uri = uri, child_handle = child_handle) + r_pdu.obj = obj.get_Base64() + return r_pdu + +class list_received_resources_elt(rpki.xml_utils.base_elt, left_right_namespace): + """ + <list_received_resources/> element. + """ + + element_name = "list_received_resources" + attributes = ("self_handle", "tag", "parent_handle", + "notBefore", "notAfter", "uri", "sia_uri", "aia_uri", "asn", "ipv4", "ipv6") + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.parent_handle, self.uri, self.notAfter) + + def serve_dispatch(self, r_msg, cb, eb): + """ + Handle a <list_received_resources/> query. The method name is a + misnomer here, there's no action attribute and no dispatch, we + just dump a bunch of data about every certificate issued to us by + one of our parents, then return. + """ + for parent in self_elt.serve_fetch_handle(self.gctx, None, self.self_handle).parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None and ca_detail.latest_ca_cert is not None: + r_msg.append(self.make_reply(parent.parent_handle, ca_detail.ca_cert_uri, ca_detail.latest_ca_cert)) + cb() + + def make_reply(self, parent_handle, uri, cert): + """ + Generate one reply PDU. + """ + resources = cert.get_3779resources() + return self.make_pdu( + tag = self.tag, + self_handle = self.self_handle, + parent_handle = parent_handle, + notBefore = str(cert.getNotBefore()), + notAfter = str(cert.getNotAfter()), + uri = uri, + sia_uri = cert.get_sia_directory_uri(), + aia_uri = cert.get_aia_uri(), + asn = resources.asn, + ipv4 = resources.v4, + ipv6 = resources.v6) + +class report_error_elt(rpki.xml_utils.text_elt, left_right_namespace): + """ + <report_error/> element. + """ + + element_name = "report_error" + attributes = ("tag", "self_handle", "error_code") + text_attribute = "error_text" + + error_text = None + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.error_code) + + @classmethod + def from_exception(cls, e, self_handle = None, tag = None): + """ + Generate a <report_error/> element from an exception. + """ + self = cls() + self.self_handle = self_handle + self.tag = tag + self.error_code = e.__class__.__name__ + self.error_text = str(e) + return self + +class msg(rpki.xml_utils.msg, left_right_namespace): + """ + Left-right PDU. + """ + + ## @var version + # Protocol version + version = 1 + + ## @var pdus + # Dispatch table of PDUs for this protocol. 
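+  #
+  # This maps top-level XML element names to PDU classes, so, for
+  # example, msg.pdus["self"] is self_elt and msg.pdus["parent"] is
+  # parent_elt; the XML parsing machinery presumably uses it to pick
+  # the right class for each incoming query element.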
+ pdus = dict((x.element_name, x) + for x in (self_elt, child_elt, parent_elt, bsc_elt, + repository_elt, list_resources_elt, + list_roa_requests_elt, list_ghostbuster_requests_elt, + list_ee_certificate_requests_elt, + list_published_objects_elt, + list_received_resources_elt, report_error_elt)) + + def serve_top_level(self, gctx, cb): + """ + Serve one msg PDU. + """ + + r_msg = self.__class__.reply() + + def loop(iterator, q_pdu): + + def fail(e): + if not isinstance(e, rpki.exceptions.NotFound): + rpki.log.traceback() + r_msg.append(report_error_elt.from_exception( + e, self_handle = q_pdu.self_handle, tag = q_pdu.tag)) + cb(r_msg) + + try: + q_pdu.gctx = gctx + q_pdu.serve_dispatch(r_msg, iterator, fail) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + fail(e) + + def done(): + cb(r_msg) + + rpki.async.iterator(self, loop, done) + +class sax_handler(rpki.xml_utils.sax_handler): + """ + SAX handler for Left-Right protocol. + """ + + pdu = msg + name = "msg" + version = "1" + +class cms_msg(rpki.x509.XML_CMS_object): + """ + Class to hold a CMS-signed left-right PDU. + """ + + encoding = "us-ascii" + schema = rpki.relaxng.left_right + saxify = sax_handler.saxify diff --git a/rpki/log.py b/rpki/log.py new file mode 100644 index 00000000..c605331a --- /dev/null +++ b/rpki/log.py @@ -0,0 +1,199 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Logging facilities for RPKI libraries. +""" + +import syslog +import sys +import os +import time +import traceback as tb + +try: + have_setproctitle = False + if os.getenv("DISABLE_SETPROCTITLE") is None: + import setproctitle + have_setproctitle = True +except ImportError: + pass + +## @var enable_trace +# Whether call tracing is enabled. + +enable_trace = False + +## @var show_python_ids +# Whether __repr__() methods should show Python id numbers + +show_python_ids = False + +## @var enable_tracebacks +# Whether tracebacks are enabled globally. Individual classes and +# modules may choose to override this. + +enable_tracebacks = False + +## @var use_setproctitle +# Whether to use setproctitle (if available) to change name shown for +# this process in ps listings (etc). + +use_setproctitle = True + +## @var proctitle_extra + +# Extra text to include in proctitle display. By default this is the +# tail of the current directory name, as this is often useful, but you +# can set it to something else if you like. If None or the empty +# string, the extra information field will be omitted from the proctitle. 
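+#
+# A minimal usage sketch (the daemon name and the extra string here
+# are made-up examples, not taken from this module):
+#
+#   import rpki.log
+#   rpki.log.proctitle_extra = "testbed"
+#   rpki.log.init("rpkid")    # ps would then show something like "rpkid (testbed)"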
+ +proctitle_extra = os.path.basename(os.getcwd()) + +def init(ident = "rpki", flags = syslog.LOG_PID, facility = syslog.LOG_DAEMON, use_syslog = None, log_file = sys.stderr, tag_log_lines = True): + """ + Initialize logging system. + """ + + # If caller didn't say whether to use syslog, use log file if user supplied one, otherwise use syslog + + if use_syslog is None: + use_syslog = log_file is sys.stderr + + logger.use_syslog = use_syslog + logger.tag_log_lines = tag_log_lines + + if use_syslog: + syslog.openlog(ident, flags, facility) + + else: + logger.tag = ident + logger.pid = os.getpid() + logger.log_file = log_file + + if ident and have_setproctitle and use_setproctitle: + if proctitle_extra: + setproctitle.setproctitle("%s (%s)" % (ident, proctitle_extra)) + else: + setproctitle.setproctitle(ident) + +class logger(object): + """ + Closure for logging. + """ + + use_syslog = True + tag = "" + pid = 0 + log_file = sys.stderr + + def __init__(self, priority): + self.priority = priority + + def __call__(self, message): + if self.use_syslog: + syslog.syslog(self.priority, message) + elif self.tag_log_lines: + self.log_file.write("%s %s[%d]: %s\n" % (time.strftime("%F %T"), self.tag, self.pid, message)) + self.log_file.flush() + else: + self.log_file.write(message + "\n") + self.log_file.flush() + +error = logger(syslog.LOG_ERR) +warn = logger(syslog.LOG_WARNING) +note = logger(syslog.LOG_NOTICE) +info = logger(syslog.LOG_INFO) +debug = logger(syslog.LOG_DEBUG) + + +def set_trace(enable): + """ + Enable or disable call tracing. + """ + + global enable_trace + enable_trace = enable + +def trace(): + """ + Execution trace -- where are we now, and whence came we here? + """ + + if enable_trace: + bt = tb.extract_stack(limit = 3) + return debug("[%s() at %s:%d from %s:%d]" % (bt[1][2], bt[1][0], bt[1][1], bt[0][0], bt[0][1])) + +def traceback(do_it = None): + """ + Consolidated backtrace facility with a bit of extra info. Argument + specifies whether or not to log the traceback (some modules and + classes have their own controls for this, this lets us provide a + unified interface). If no argument is specified, we use the global + default value rpki.log.enable_tracebacks. + + Assertion failures generate backtraces unconditionally, on the + theory that (a) assertion failures are programming errors by + definition, and (b) it's often hard to figure out what's triggering + a particular assertion failure without the backtrace. + """ + + if do_it is None: + do_it = enable_tracebacks + + e = sys.exc_info()[1] + assert e is not None, "rpki.log.traceback() called without valid trace on stack! This should not happen." + + if do_it or isinstance(e, AssertionError): + bt = tb.extract_stack(limit = 3) + error("Exception caught in %s() at %s:%d called from %s:%d" % (bt[1][2], bt[1][0], bt[1][1], bt[0][0], bt[0][1])) + bt = tb.format_exc() + assert bt is not None, "Apparently I'm still not using the right test for null backtrace" + for line in bt.splitlines(): + warn(line) + +def log_repr(obj, *tokens): + """ + Constructor for __repr__() strings, handles suppression of Python + IDs as needed, includes self_handle when available. + """ + + # pylint: disable=W0702 + + words = ["%s.%s" % (obj.__class__.__module__, obj.__class__.__name__)] + try: + words.append("{%s}" % obj.self.self_handle) + except: + pass + + for token in tokens: + if token is not None: + try: + s = str(token) + except: + s = "???" 
+ debug("Failed to generate repr() string for object of type %r" % type(token)) + traceback() + if s: + words.append(s) + + if show_python_ids: + words.append(" at %#x" % id(obj)) + + return "<" + " ".join(words) + ">" diff --git a/rpki/myrpki.py b/rpki/myrpki.py new file mode 100644 index 00000000..c5c7990f --- /dev/null +++ b/rpki/myrpki.py @@ -0,0 +1,23 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +This is a tombstone for a program that no longer exists. +""" + +if __name__ != "__main__": # sic -- don't break regression tests + import sys + sys.exit('"myrpki" is obsolete. Please use "rpkic" instead.') diff --git a/rpki/mysql_import.py b/rpki/mysql_import.py new file mode 100644 index 00000000..88d30357 --- /dev/null +++ b/rpki/mysql_import.py @@ -0,0 +1,65 @@ +# $Id$ +# +# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Import wrapper for MySQLdb. + +MySQLdb is an independent package, not part of Python, and has some +minor version skew issues with respect to Python itself, which we want +to suppress so that they don't annoy the user. 
None of this is +particularly hard, but the maze of whacky incantations required to do +this in multiple version of Python on multiple platforms is somewhat +tedious, and turns out to cause other problems when combined with the +way we construct executable Python scripts containing a standard +header indicating the location of our config file. + +So it turns out to be easier just to put all of the import voodoo +here, and have other modules that need MySQLdb import the MySQL module +object from this module. Looks kind of strange, but seems to work. +""" + +# pylint: disable=W0611 + +from __future__ import with_statement + +import warnings + +if hasattr(warnings, "catch_warnings"): + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + import MySQLdb +else: + import MySQLdb + +import _mysql_exceptions + +warnings.simplefilter("error", _mysql_exceptions.Warning) + +import MySQLdb.converters diff --git a/rpki/oids.py b/rpki/oids.py new file mode 100644 index 00000000..a97df6a7 --- /dev/null +++ b/rpki/oids.py @@ -0,0 +1,101 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +OID database. + +This used to be fairly complicated, with multiple representations and +a collection of conversion functions, but now it is very simple: + +- We represent OIDs as Python strings, holding the dotted-decimal + form of an OID. Nothing but decimal digits and "." is legal. + This is compatible with the format that rpki.POW uses. + +- We define symbols in this module whose values are OIDs. + +That's pretty much it. There's a bit of code at the end which checks +the syntax of the defined strings and provides a pretty-print function +for the rare occasion when we need to print an OID, but other than +that this is just a collection of symbolic names for text strings. 
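+
+For example (an illustrative interactive session, assuming this module
+is importable as rpki.oids):
+
+  >>> import rpki.oids
+  >>> rpki.oids.commonName
+  '2.5.4.3'
+  >>> rpki.oids.oid2name("2.16.840.1.101.3.4.2.1")
+  'id-sha256'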
+""" + +ecdsa_with_SHA256 = "1.2.840.10045.4.3.2" +sha256WithRSAEncryption = "1.2.840.113549.1.1.11" +sha384WithRSAEncryption = "1.2.840.113549.1.1.12" +sha512WithRSAEncryption = "1.2.840.113549.1.1.13" +id_data = "1.2.840.113549.1.7.1" +id_smime = "1.2.840.113549.1.9.16" +id_ct = "1.2.840.113549.1.9.16.1" +id_ct_routeOriginAttestation = "1.2.840.113549.1.9.16.1.24" +id_ct_rpkiManifest = "1.2.840.113549.1.9.16.1.26" +id_ct_xml = "1.2.840.113549.1.9.16.1.28" +id_ct_rpkiGhostbusters = "1.2.840.113549.1.9.16.1.35" +authorityInfoAccess = "1.3.6.1.5.5.7.1.1" +sbgp_ipAddrBlock = "1.3.6.1.5.5.7.1.7" +sbgp_autonomousSysNum = "1.3.6.1.5.5.7.1.8" +subjectInfoAccess = "1.3.6.1.5.5.7.1.11" +id_kp_bgpsec_router = "1.3.6.1.5.5.7.3.30" +id_cp_ipAddr_asNumber = "1.3.6.1.5.5.7.14.2" +id_ad_caIssuers = "1.3.6.1.5.5.7.48.2" +id_ad_caRepository = "1.3.6.1.5.5.7.48.5" +id_ad_signedObjectRepository = "1.3.6.1.5.5.7.48.9" +id_ad_rpkiManifest = "1.3.6.1.5.5.7.48.10" +id_ad_signedObject = "1.3.6.1.5.5.7.48.11" +commonName = "2.5.4.3" +serialNumber = "2.5.4.5" +countryName = "2.5.4.6" +localityName = "2.5.4.7" +stateOrProvinceName = "2.5.4.8" +streetAddress = "2.5.4.9" +organizationName = "2.5.4.10" +organizationalUnitName = "2.5.4.11" +subjectKeyIdentifier = "2.5.29.14" +keyUsage = "2.5.29.15" +basicConstraints = "2.5.29.19" +cRLNumber = "2.5.29.20" +cRLDistributionPoints = "2.5.29.31" +certificatePolicies = "2.5.29.32" +authorityKeyIdentifier = "2.5.29.35" +extendedKeyUsage = "2.5.29.37" +id_sha256 = "2.16.840.1.101.3.4.2.1" + +# Make sure all symbols exported so far look like OIDs, and build a +# dictionary to use when pretty-printing. + +_oid2name = {} + +for _sym in dir(): + if not _sym.startswith("_"): + _val = globals()[_sym] + if not isinstance(_val, str) or not all(_v.isdigit() for _v in _val.split(".")): + raise ValueError("Bad OID definition: %s = %r" % (_sym, _val)) + _oid2name[_val] = _sym.replace("_", "-") + +del _sym +del _val + +def oid2name(oid): + """ + Translate an OID into a string suitable for printing. + """ + + if not isinstance(oid, (str, unicode)) or not all(o.isdigit() for o in oid.split(".")): + raise ValueError("Parameter does not look like an OID string: " + repr(oid)) + + return _oid2name.get(oid, oid) diff --git a/rpki/old_irdbd.py b/rpki/old_irdbd.py new file mode 100644 index 00000000..41060344 --- /dev/null +++ b/rpki/old_irdbd.py @@ -0,0 +1,325 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +IR database daemon. + +This is the old (pre-Django) version of irdbd, still used by smoketest +and perhaps still useful as a minimal example. 
This does NOT work with +the GUI, rpkic, or any of the other more recent tools. +""" + +import sys +import os +import time +import argparse +import urlparse +import rpki.http +import rpki.config +import rpki.resource_set +import rpki.relaxng +import rpki.exceptions +import rpki.left_right +import rpki.log +import rpki.x509 + +from rpki.mysql_import import MySQLdb + +class main(object): + + + def handle_list_resources(self, q_pdu, r_msg): + + r_pdu = rpki.left_right.list_resources_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.child_handle = q_pdu.child_handle + + self.cur.execute( + """ + SELECT registrant_id, valid_until + FROM registrant + WHERE registry_handle = %s AND registrant_handle = %s + """, + (q_pdu.self_handle, q_pdu.child_handle)) + + if self.cur.rowcount != 1: + raise rpki.exceptions.NotInDatabase( + "This query should have produced a single exact match, something's messed up" + " (rowcount = %d, self_handle = %s, child_handle = %s)" + % (self.cur.rowcount, q_pdu.self_handle, q_pdu.child_handle)) + + registrant_id, valid_until = self.cur.fetchone() + + r_pdu.valid_until = valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") + + r_pdu.asn = rpki.resource_set.resource_set_as.from_sql( + self.cur, + """ + SELECT start_as, end_as + FROM registrant_asn + WHERE registrant_id = %s + """, + (registrant_id,)) + + r_pdu.ipv4 = rpki.resource_set.resource_set_ipv4.from_sql( + self.cur, + """ + SELECT start_ip, end_ip + FROM registrant_net + WHERE registrant_id = %s AND version = 4 + """, + (registrant_id,)) + + r_pdu.ipv6 = rpki.resource_set.resource_set_ipv6.from_sql( + self.cur, + """ + SELECT start_ip, end_ip + FROM registrant_net + WHERE registrant_id = %s AND version = 6 + """, + (registrant_id,)) + + r_msg.append(r_pdu) + + + def handle_list_roa_requests(self, q_pdu, r_msg): + + self.cur.execute( + "SELECT roa_request_id, asn FROM roa_request WHERE self_handle = %s", + (q_pdu.self_handle,)) + + for roa_request_id, asn in self.cur.fetchall(): + + r_pdu = rpki.left_right.list_roa_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.asn = asn + + r_pdu.ipv4 = rpki.resource_set.roa_prefix_set_ipv4.from_sql( + self.cur, + """ + SELECT prefix, prefixlen, max_prefixlen + FROM roa_request_prefix + WHERE roa_request_id = %s AND version = 4 + """, + (roa_request_id,)) + + r_pdu.ipv6 = rpki.resource_set.roa_prefix_set_ipv6.from_sql( + self.cur, + """ + SELECT prefix, prefixlen, max_prefixlen + FROM roa_request_prefix + WHERE roa_request_id = %s AND version = 6 + """, + (roa_request_id,)) + + r_msg.append(r_pdu) + + + def handle_list_ghostbuster_requests(self, q_pdu, r_msg): + + self.cur.execute( + """ + SELECT vcard + FROM ghostbuster_request + WHERE self_handle = %s AND parent_handle = %s + """, + (q_pdu.self_handle, q_pdu.parent_handle)) + + vcards = [result[0] for result in self.cur.fetchall()] + + if not vcards: + + self.cur.execute( + """ + SELECT vcard + FROM ghostbuster_request + WHERE self_handle = %s AND parent_handle IS NULL + """, + (q_pdu.self_handle,)) + + vcards = [result[0] for result in self.cur.fetchall()] + + for vcard in vcards: + r_pdu = rpki.left_right.list_ghostbuster_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.parent_handle = q_pdu.parent_handle + r_pdu.vcard = vcard + r_msg.append(r_pdu) + + + def handle_list_ee_certificate_requests(self, q_pdu, r_msg): + + self.cur.execute( + """ + SELECT ee_certificate_id, pkcs10, gski, cn, sn, eku, valid_until + FROM ee_certificate + 
WHERE self_handle = %s + """, + (q_pdu.self_handle,)) + + for ee_certificate_id, pkcs10, gski, cn, sn, eku, valid_until in self.cur.fetchall(): + + r_pdu = rpki.left_right.list_ee_certificate_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.valid_until = valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") + r_pdu.pkcs10 = rpki.x509.PKCS10(DER = pkcs10) + r_pdu.gski = gski + r_pdu.cn = cn + r_pdu.sn = sn + r_pdu.eku = eku + + r_pdu.asn = rpki.resource_set.resource_set_as.from_sql( + self.cur, + """ + SELECT start_as, end_as + FROM ee_certificate_asn + WHERE ee_certificate_id = %s + """, + (ee_certificate_id,)) + + r_pdu.ipv4 = rpki.resource_set.resource_set_ipv4.from_sql( + self.cur, + """ + SELECT start_ip, end_ip + FROM ee_certificate_net + WHERE ee_certificate_id = %s AND version = 4 + """, + (ee_certificate_id,)) + + r_pdu.ipv6 = rpki.resource_set.resource_set_ipv6.from_sql( + self.cur, + """ + SELECT start_ip, end_ip + FROM ee_certificate_net + WHERE ee_certificate_id = %s AND version = 6 + """, + (ee_certificate_id,)) + + r_msg.append(r_pdu) + + + handle_dispatch = { + rpki.left_right.list_resources_elt : handle_list_resources, + rpki.left_right.list_roa_requests_elt : handle_list_roa_requests, + rpki.left_right.list_ghostbuster_requests_elt : handle_list_ghostbuster_requests, + rpki.left_right.list_ee_certificate_requests_elt : handle_list_ee_certificate_requests } + + def handler(self, query, path, cb): + try: + + self.db.ping(True) + + r_msg = rpki.left_right.msg.reply() + + try: + + q_msg = rpki.left_right.cms_msg(DER = query).unwrap((self.bpki_ta, self.rpkid_cert)) + + if not isinstance(q_msg, rpki.left_right.msg) or not q_msg.is_query(): + raise rpki.exceptions.BadQuery, "Unexpected %r PDU" % q_msg + + for q_pdu in q_msg: + + try: + + try: + h = self.handle_dispatch[type(q_pdu)] + except KeyError: + raise rpki.exceptions.BadQuery, "Unexpected %r PDU" % q_pdu + else: + h(self, q_pdu, r_msg) + + except (rpki.async.ExitNow, SystemExit): + raise + + except Exception, e: + rpki.log.traceback() + r_msg.append(rpki.left_right.report_error_elt.from_exception(e, q_pdu.self_handle, q_pdu.tag)) + + except (rpki.async.ExitNow, SystemExit): + raise + + except Exception, e: + rpki.log.traceback() + r_msg.append(rpki.left_right.report_error_elt.from_exception(e)) + + cb(200, body = rpki.left_right.cms_msg().wrap(r_msg, self.irdbd_key, self.irdbd_cert)) + + except (rpki.async.ExitNow, SystemExit): + raise + + except Exception, e: + rpki.log.traceback() + + # We only get here in cases where we couldn't or wouldn't generate + # <report_error/>, so just return HTTP failure. 
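      # (In practice that means something outside the inner try blew up,
      # e.g. the database ping or the CMS signing of the reply itself.)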
+ + cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) + + + def __init__(self): + + os.environ["TZ"] = "UTC" + time.tzset() + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "override default location of configuration file") + parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging mode") + args = parser.parse_args() + + rpki.log.init("irdbd", use_syslog = not args.debug) + + self.cfg = rpki.config.parser(args.config, "irdbd") + + startup_msg = self.cfg.get("startup-message", "") + if startup_msg: + rpki.log.info(startup_msg) + + self.cfg.set_global_flags() + + self.db = MySQLdb.connect(user = self.cfg.get("sql-username"), + db = self.cfg.get("sql-database"), + passwd = self.cfg.get("sql-password")) + + self.cur = self.db.cursor() + self.db.autocommit(True) + + self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta")) + self.rpkid_cert = rpki.x509.X509(Auto_update = self.cfg.get("rpkid-cert")) + self.irdbd_cert = rpki.x509.X509(Auto_update = self.cfg.get("irdbd-cert")) + self.irdbd_key = rpki.x509.RSA( Auto_update = self.cfg.get("irdbd-key")) + + u = urlparse.urlparse(self.cfg.get("http-url")) + + assert u.scheme in ("", "http") and \ + u.username is None and \ + u.password is None and \ + u.params == "" and \ + u.query == "" and \ + u.fragment == "" + + rpki.http.server(host = u.hostname or "localhost", + port = u.port or 443, + handlers = ((u.path, self.handler),)) diff --git a/rpki/pubd.py b/rpki/pubd.py new file mode 100644 index 00000000..31f22ed4 --- /dev/null +++ b/rpki/pubd.py @@ -0,0 +1,174 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +RPKI publication engine. +""" + +import os +import time +import argparse +import sys +import re +import rpki.resource_set +import rpki.up_down +import rpki.x509 +import rpki.sql +import rpki.http +import rpki.config +import rpki.exceptions +import rpki.relaxng +import rpki.log +import rpki.publication +import rpki.daemonize + +class main(object): + """ + Main program for pubd. 
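    An illustrative sketch of the [pubd] configuration options read by
    main() below; every value shown here is a placeholder, and the SQL
    options consumed by rpki.sql.session() are omitted:

        [pubd]
        bpki-ta                 = bpki/pubd/ca.cer
        irbe-cert               = bpki/pubd/irbe.cer
        pubd-cert               = bpki/pubd/pubd.cer
        pubd-key                = bpki/pubd/pubd.key
        server-host             = localhost
        server-port             = 4434
        publication-base        = publication/
        publication-multimodule = no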
+ """ + + def __init__(self): + + os.environ["TZ"] = "UTC" + time.tzset() + + self.irbe_cms_timestamp = None + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "override default location of configuration file") + parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging mode") + parser.add_argument("-f", "--foreground", action = "store_true", + help = "do not daemonize") + parser.add_argument("--pidfile", + help = "override default location of pid file") + parser.add_argument("--profile", + help = "enable profiling, saving data to PROFILE") + args = parser.parse_args() + + self.profile = args.profile + + rpki.log.init("pubd", use_syslog = not args.debug) + + self.cfg = rpki.config.parser(args.config, "pubd") + self.cfg.set_global_flags() + + if not args.foreground and not args.debug: + rpki.daemonize.daemon(pidfile = args.pidfile) + + if self.profile: + import cProfile + prof = cProfile.Profile() + try: + prof.runcall(self.main) + finally: + prof.dump_stats(self.profile) + rpki.log.info("Dumped profile data to %s" % self.profile) + else: + self.main() + + def main(self): + + if self.profile: + rpki.log.info("Running in profile mode with output to %s" % self.profile) + + self.sql = rpki.sql.session(self.cfg) + + self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta")) + self.irbe_cert = rpki.x509.X509(Auto_update = self.cfg.get("irbe-cert")) + self.pubd_cert = rpki.x509.X509(Auto_update = self.cfg.get("pubd-cert")) + self.pubd_key = rpki.x509.RSA( Auto_update = self.cfg.get("pubd-key")) + + self.http_server_host = self.cfg.get("server-host", "") + self.http_server_port = self.cfg.getint("server-port") + + self.publication_base = self.cfg.get("publication-base", "publication/") + + self.publication_multimodule = self.cfg.getboolean("publication-multimodule", False) + + rpki.http.server( + host = self.http_server_host, + port = self.http_server_port, + handlers = (("/control", self.control_handler), + ("/client/", self.client_handler))) + + def handler_common(self, query, client, cb, certs, crl = None): + """ + Common PDU handler code. + """ + + def done(r_msg): + reply = rpki.publication.cms_msg().wrap(r_msg, self.pubd_key, self.pubd_cert, crl) + self.sql.sweep() + cb(reply) + + q_cms = rpki.publication.cms_msg(DER = query) + q_msg = q_cms.unwrap(certs) + if client is None: + self.irbe_cms_timestamp = q_cms.check_replay(self.irbe_cms_timestamp, "control") + else: + q_cms.check_replay_sql(client, client.client_handle) + q_msg.serve_top_level(self, client, done) + + def control_handler(self, query, path, cb): + """ + Process one PDU from the IRBE. + """ + + def done(body): + cb(200, body = body) + + rpki.log.trace() + try: + self.handler_common(query, None, done, (self.bpki_ta, self.irbe_cert)) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) + + client_url_regexp = re.compile("/client/([-A-Z0-9_/]+)$", re.I) + + def client_handler(self, query, path, cb): + """ + Process one PDU from a client. 
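    The per-client dispatch hinges on client_url_regexp above; for
    illustration:

        >>> import re
        >>> re.compile("/client/([-A-Z0-9_/]+)$", re.I).search("/client/alice").group(1)
        'alice'

    The captured group becomes the client_handle used to look the client
    up in SQL below.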
+ """ + + def done(body): + cb(200, body = body) + + rpki.log.trace() + try: + match = self.client_url_regexp.search(path) + if match is None: + raise rpki.exceptions.BadContactURL, "Bad path: %s" % path + client_handle = match.group(1) + client = rpki.publication.client_elt.sql_fetch_where1(self, "client_handle = %s", (client_handle,)) + if client is None: + raise rpki.exceptions.ClientNotFound, "Could not find client %s" % client_handle + config = rpki.publication.config_elt.fetch(self) + if config is None or config.bpki_crl is None: + raise rpki.exceptions.CMSCRLNotSet + self.handler_common(query, client, done, (self.bpki_ta, client.bpki_cert, client.bpki_glue), config.bpki_crl) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + cb(500, reason = "Could not process PDU: %s" % e) diff --git a/rpki/publication.py b/rpki/publication.py new file mode 100644 index 00000000..2462ae39 --- /dev/null +++ b/rpki/publication.py @@ -0,0 +1,466 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +RPKI "publication" protocol. +""" + +import os +import errno +import rpki.resource_set +import rpki.x509 +import rpki.sql +import rpki.exceptions +import rpki.xml_utils +import rpki.http +import rpki.up_down +import rpki.relaxng +import rpki.sundial +import rpki.log + +class publication_namespace(object): + """ + XML namespace parameters for publication protocol. + """ + + xmlns = "http://www.hactrn.net/uris/rpki/publication-spec/" + nsmap = { None : xmlns } + +class control_elt(rpki.xml_utils.data_elt, rpki.sql.sql_persistent, publication_namespace): + """ + Virtual class for control channel objects. + """ + + def serve_dispatch(self, r_msg, cb, eb): + """ + Action dispatch handler. This needs special handling because we + need to make sure that this PDU arrived via the control channel. 
+ """ + if self.client is not None: + raise rpki.exceptions.BadQuery, "Control query received on client channel" + rpki.xml_utils.data_elt.serve_dispatch(self, r_msg, cb, eb) + +class config_elt(control_elt): + """ + <config/> element. This is a little weird because there should + never be more than one row in the SQL config table, but we have to + put the BPKI CRL somewhere and SQL is the least bad place available. + + So we reuse a lot of the SQL machinery, but we nail config_id at 1, + we don't expose it in the XML protocol, and we only support the get + and set actions. + """ + + attributes = ("action", "tag") + element_name = "config" + elements = ("bpki_crl",) + + sql_template = rpki.sql.template( + "config", + "config_id", + ("bpki_crl", rpki.x509.CRL)) + + wired_in_config_id = 1 + + def startElement(self, stack, name, attrs): + """ + StartElement() handler for config object. This requires special + handling because of the weird way we treat config_id. + """ + control_elt.startElement(self, stack, name, attrs) + self.config_id = self.wired_in_config_id + + @classmethod + def fetch(cls, gctx): + """ + Fetch the config object from SQL. This requires special handling + because of the weird way we treat config_id. + """ + return cls.sql_fetch(gctx, cls.wired_in_config_id) + + def serve_set(self, r_msg, cb, eb): + """ + Handle a set action. This requires special handling because + config doesn't support the create method. + """ + if self.sql_fetch(self.gctx, self.config_id) is None: + control_elt.serve_create(self, r_msg, cb, eb) + else: + control_elt.serve_set(self, r_msg, cb, eb) + + def serve_fetch_one_maybe(self): + """ + Find the config object on which a get or set method should + operate. + """ + return self.sql_fetch(self.gctx, self.config_id) + +class client_elt(control_elt): + """ + <client/> element. + """ + + element_name = "client" + attributes = ("action", "tag", "client_handle", "base_uri") + elements = ("bpki_cert", "bpki_glue") + booleans = ("clear_replay_protection",) + + sql_template = rpki.sql.template( + "client", + "client_id", + "client_handle", + "base_uri", + ("bpki_cert", rpki.x509.X509), + ("bpki_glue", rpki.x509.X509), + ("last_cms_timestamp", rpki.sundial.datetime)) + + base_uri = None + bpki_cert = None + bpki_glue = None + last_cms_timestamp = None + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for client_elt. + """ + actions = [] + if q_pdu.clear_replay_protection: + actions.append(self.serve_clear_replay_protection) + def loop(iterator, action): + action(iterator, eb) + rpki.async.iterator(actions, loop, cb) + + def serve_clear_replay_protection(self, cb, eb): + """ + Handle a clear_replay_protection action for this client. + """ + self.last_cms_timestamp = None + self.sql_mark_dirty() + cb() + + def serve_fetch_one_maybe(self): + """ + Find the client object on which a get, set, or destroy method + should operate, or which would conflict with a create method. + """ + return self.sql_fetch_where1(self.gctx, "client_handle = %s", self.client_handle) + + def serve_fetch_all(self): + """ + Find client objects on which a list method should operate. + """ + return self.sql_fetch_all(self.gctx) + + def check_allowed_uri(self, uri): + """ + Make sure that a target URI is within this client's allowed URI space. + """ + if not uri.startswith(self.base_uri): + raise rpki.exceptions.ForbiddenURI + +class publication_object_elt(rpki.xml_utils.base_elt, publication_namespace): + """ + Virtual class for publishable objects. 
These have very similar + syntax, differences lie in underlying datatype and methods. XML + methods are a little different from the pattern used for objects + that support the create/set/get/list/destroy actions, but + publishable objects don't go in SQL either so these classes would be + different in any case. + """ + + attributes = ("action", "tag", "client_handle", "uri") + payload_type = None + payload = None + + def endElement(self, stack, name, text): + """ + Handle a publishable element element. + """ + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + if text: + self.payload = self.payload_type(Base64 = text) # pylint: disable=E1102 + stack.pop() + + def toXML(self): + """ + Generate XML element for publishable object. + """ + elt = self.make_elt() + if self.payload: + elt.text = self.payload.get_Base64() + return elt + + def serve_dispatch(self, r_msg, cb, eb): + """ + Action dispatch handler. + """ + # pylint: disable=E0203 + try: + if self.client is None: + raise rpki.exceptions.BadQuery, "Client query received on control channel" + dispatch = { "publish" : self.serve_publish, + "withdraw" : self.serve_withdraw } + if self.action not in dispatch: + raise rpki.exceptions.BadQuery, "Unexpected query: action %s" % self.action + self.client.check_allowed_uri(self.uri) + dispatch[self.action]() + r_pdu = self.__class__() + r_pdu.action = self.action + r_pdu.tag = self.tag + r_pdu.uri = self.uri + r_msg.append(r_pdu) + cb() + except rpki.exceptions.NoObjectAtURI, e: + # This can happen when we're cleaning up from a prior mess, so + # we generate a <report_error/> PDU then carry on. + r_msg.append(report_error_elt.from_exception(e, self.tag)) + cb() + + def serve_publish(self): + """ + Publish an object. + """ + rpki.log.info("Publishing %s" % self.payload.tracking_data(self.uri)) + filename = self.uri_to_filename() + filename_tmp = filename + ".tmp" + dirname = os.path.dirname(filename) + if not os.path.isdir(dirname): + os.makedirs(dirname) + f = open(filename_tmp, "wb") + f.write(self.payload.get_DER()) + f.close() + os.rename(filename_tmp, filename) + + def serve_withdraw(self): + """ + Withdraw an object, then recursively delete empty directories. + """ + rpki.log.info("Withdrawing %s" % self.uri) + filename = self.uri_to_filename() + try: + os.remove(filename) + except OSError, e: + if e.errno == errno.ENOENT: + raise rpki.exceptions.NoObjectAtURI, "No object published at %s" % self.uri + else: + raise + min_path_len = len(self.gctx.publication_base.rstrip("/")) + dirname = os.path.dirname(filename) + while len(dirname) > min_path_len: + try: + os.rmdir(dirname) + except OSError: + break + else: + dirname = os.path.dirname(dirname) + + def uri_to_filename(self): + """ + Convert a URI to a local filename. + """ + if not self.uri.startswith("rsync://"): + raise rpki.exceptions.BadURISyntax, self.uri + path = self.uri.split("/")[3:] + if not self.gctx.publication_multimodule: + del path[0] + path.insert(0, self.gctx.publication_base.rstrip("/")) + filename = "/".join(path) + if "/../" in filename or filename.endswith("/.."): + raise rpki.exceptions.BadURISyntax, filename + return filename + + @classmethod + def make_publish(cls, uri, obj, tag = None): + """ + Construct a publication PDU. + """ + assert cls.payload_type is not None and type(obj) is cls.payload_type + return cls.make_pdu(action = "publish", uri = uri, payload = obj, tag = tag) + + @classmethod + def make_withdraw(cls, uri, obj, tag = None): + """ + Construct a withdrawal PDU. 
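    A minimal usage sketch covering make_publish() above and this method,
    using the certificate_elt subclass defined below (file name and URI
    purely illustrative):

        cert = rpki.x509.X509(DER_file = "alice.cer")
        uri  = "rsync://repo.example.org/alice/alice.cer"

        publish_pdu  = certificate_elt.make_publish(uri, cert)
        withdraw_pdu = certificate_elt.make_withdraw(uri, cert)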
+ """ + assert cls.payload_type is not None and type(obj) is cls.payload_type + return cls.make_pdu(action = "withdraw", uri = uri, tag = tag) + + def raise_if_error(self): + """ + No-op, since this is not a <report_error/> PDU. + """ + pass + +class certificate_elt(publication_object_elt): + """ + <certificate/> element. + """ + + element_name = "certificate" + payload_type = rpki.x509.X509 + +class crl_elt(publication_object_elt): + """ + <crl/> element. + """ + + element_name = "crl" + payload_type = rpki.x509.CRL + +class manifest_elt(publication_object_elt): + """ + <manifest/> element. + """ + + element_name = "manifest" + payload_type = rpki.x509.SignedManifest + +class roa_elt(publication_object_elt): + """ + <roa/> element. + """ + + element_name = "roa" + payload_type = rpki.x509.ROA + +class ghostbuster_elt(publication_object_elt): + """ + <ghostbuster/> element. + """ + + element_name = "ghostbuster" + payload_type = rpki.x509.Ghostbuster + +publication_object_elt.obj2elt = dict( + (e.payload_type, e) for e in + (certificate_elt, crl_elt, manifest_elt, roa_elt, ghostbuster_elt)) + +class report_error_elt(rpki.xml_utils.text_elt, publication_namespace): + """ + <report_error/> element. + """ + + element_name = "report_error" + attributes = ("tag", "error_code") + text_attribute = "error_text" + + error_text = None + + @classmethod + def from_exception(cls, e, tag = None): + """ + Generate a <report_error/> element from an exception. + """ + self = cls() + self.tag = tag + self.error_code = e.__class__.__name__ + self.error_text = str(e) + return self + + def __str__(self): + s = "" + if getattr(self, "tag", None) is not None: + s += "[%s] " % self.tag + s += self.error_code + if getattr(self, "error_text", None) is not None: + s += ": " + self.error_text + return s + + def raise_if_error(self): + """ + Raise exception associated with this <report_error/> PDU. + """ + t = rpki.exceptions.__dict__.get(self.error_code) + if isinstance(t, type) and issubclass(t, rpki.exceptions.RPKI_Exception): + raise t, getattr(self, "text", None) + else: + raise rpki.exceptions.BadPublicationReply, "Unexpected response from pubd: %s" % self + +class msg(rpki.xml_utils.msg, publication_namespace): + """ + Publication PDU. + """ + + ## @var version + # Protocol version + version = 1 + + ## @var pdus + # Dispatch table of PDUs for this protocol. + pdus = dict((x.element_name, x) for x in + (config_elt, client_elt, certificate_elt, crl_elt, manifest_elt, roa_elt, ghostbuster_elt, report_error_elt)) + + def serve_top_level(self, gctx, client, cb): + """ + Serve one msg PDU. + """ + if not self.is_query(): + raise rpki.exceptions.BadQuery, "Message type is not query" + r_msg = self.__class__.reply() + + def loop(iterator, q_pdu): + + def fail(e): + if not isinstance(e, rpki.exceptions.NotFound): + rpki.log.traceback() + r_msg.append(report_error_elt.from_exception(e, q_pdu.tag)) + cb(r_msg) + + try: + q_pdu.gctx = gctx + q_pdu.client = client + q_pdu.serve_dispatch(r_msg, iterator, fail) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + fail(e) + + def done(): + cb(r_msg) + + rpki.async.iterator(self, loop, done) + +class sax_handler(rpki.xml_utils.sax_handler): + """ + SAX handler for publication protocol. + """ + + pdu = msg + name = "msg" + version = "1" + +class cms_msg(rpki.x509.XML_CMS_object): + """ + Class to hold a CMS-signed publication PDU. 
+ """ + + encoding = "us-ascii" + schema = rpki.relaxng.publication + saxify = sax_handler.saxify diff --git a/rpki/rcynic.py b/rpki/rcynic.py new file mode 100644 index 00000000..73394fb8 --- /dev/null +++ b/rpki/rcynic.py @@ -0,0 +1,275 @@ +# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Iterator class to parse the output of an rcynic run. +""" + +__version__ = '$Id$' + +import os +import rpki.x509 +import rpki.exceptions +import rpki.resource_set +from xml.etree.ElementTree import ElementTree + +class UnknownObject(rpki.exceptions.RPKI_Exception): + """ + Unrecognized object in rcynic result cache. + """ + +class NotRsyncURI(rpki.exceptions.RPKI_Exception): + """ + URI is not an rsync URI. + """ + +class rcynic_object(object): + """ + An object read from rcynic cache. + """ + + def __init__(self, filename, **kwargs): + self.filename = filename + for k, v in kwargs.iteritems(): + setattr(self, k, v) + self.obj = self.obj_class(DER_file = filename) + + def __repr__(self): + return "<%s %s %s at 0x%x>" % (self.__class__.__name__, self.uri, self.resources, id(self)) + + def show_attrs(self, *attrs): + """ + Print a bunch of object attributes, quietly ignoring any that + might be missing. + """ + for a in attrs: + try: + print "%s: %s" % (a.capitalize(), getattr(self, a)) + except AttributeError: + pass + + def show(self): + """ + Print common object attributes. + """ + self.show_attrs("filename", "uri", "status", "timestamp") + +class rcynic_certificate(rcynic_object): + """ + A certificate from rcynic cache. + """ + + obj_class = rpki.x509.X509 + + def __init__(self, filename, **kwargs): + rcynic_object.__init__(self, filename, **kwargs) + self.notBefore = self.obj.getNotBefore() + self.notAfter = self.obj.getNotAfter() + self.aia_uri = self.obj.get_aia_uri() + self.sia_directory_uri = self.obj.get_sia_directory_uri() + self.manifest_uri = self.obj.get_sia_manifest_uri() + self.resources = self.obj.get_3779resources() + self.is_ca = self.obj.is_CA() + self.serial = self.obj.getSerial() + self.issuer = self.obj.getIssuer() + self.subject = self.obj.getSubject() + self.ski = self.obj.hSKI() + self.aki = self.obj.hAKI() + + def show(self): + """ + Print certificate attributes. + """ + rcynic_object.show(self) + self.show_attrs("notBefore", "notAfter", "aia_uri", "sia_directory_uri", "resources") + +class rcynic_roa(rcynic_object): + """ + A ROA from rcynic cache. 
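  A hedged usage sketch (the path is illustrative), relying on the
  attributes filled in by __init__() below:

      roa = rcynic_roa("authenticated/example.net/alice/router.roa",
                       uri = "rsync://example.net/alice/router.roa")
      roa.show()       # filename, URI, validity dates, AIA, resources, asID, prefixes
      print roa.asID, roa.prefix_sets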
+ """ + + obj_class = rpki.x509.ROA + + def __init__(self, filename, **kwargs): + rcynic_object.__init__(self, filename, **kwargs) + self.obj.extract() + self.asID = self.obj.get_POW().getASID() + self.prefix_sets = [] + v4, v6 = self.obj.get_POW().getPrefixes() + if v4: + self.prefix_sets.append(rpki.resource_set.roa_prefix_set_ipv4([ + rpki.resource_set.roa_prefix_ipv4(p[0], p[1], p[2]) for p in v4])) + if v6: + self.prefix_sets.append(rpki.resource_set.roa_prefix_set_ipv6([ + rpki.resource_set.roa_prefix_ipv6(p[0], p[1], p[2]) for p in v6])) + self.ee = rpki.x509.X509(POW = self.obj.get_POW().certs()[0]) + self.notBefore = self.ee.getNotBefore() + self.notAfter = self.ee.getNotAfter() + self.aia_uri = self.ee.get_aia_uri() + self.resources = self.ee.get_3779resources() + self.issuer = self.ee.getIssuer() + self.serial = self.ee.getSerial() + self.subject = self.ee.getSubject() + self.aki = self.ee.hAKI() + self.ski = self.ee.hSKI() + + def show(self): + """ + Print ROA attributes. + """ + rcynic_object.show(self) + self.show_attrs("notBefore", "notAfter", "aia_uri", "resources", "asID") + if self.prefix_sets: + print "Prefixes:", ",".join(str(i) for i in self.prefix_sets) + +class rcynic_ghostbuster(rcynic_object): + """ + Ghostbuster record from the rcynic cache. + """ + + obj_class = rpki.x509.Ghostbuster + + def __init__(self, *args, **kwargs): + rcynic_object.__init__(self, *args, **kwargs) + self.obj.extract() + self.vcard = self.obj.get_content() + self.ee = rpki.x509.X509(POW = self.obj.get_POW().certs()[0]) + self.notBefore = self.ee.getNotBefore() + self.notAfter = self.ee.getNotAfter() + self.aia_uri = self.ee.get_aia_uri() + self.issuer = self.ee.getIssuer() + self.serial = self.ee.getSerial() + self.subject = self.ee.getSubject() + self.aki = self.ee.hAKI() + self.ski = self.ee.hSKI() + + def show(self): + rcynic_object.show(self) + self.show_attrs("notBefore", "notAfter", "vcard") + +file_name_classes = { + ".cer" : rcynic_certificate, + ".gbr" : rcynic_ghostbuster, + ".roa" : rcynic_roa } + +class rcynic_file_iterator(object): + """ + Iterate over files in an rcynic output tree, yielding a Python + representation of each object found. + """ + + def __init__(self, rcynic_root, + authenticated_subdir = "authenticated"): + self.rcynic_dir = os.path.join(rcynic_root, authenticated_subdir) + + def __iter__(self): + for root, dirs, files in os.walk(self.rcynic_dir): # pylint: disable=W0612 + for filename in files: + filename = os.path.join(root, filename) + ext = os.path.splitext(filename)[1] + if ext in file_name_classes: + yield file_name_classes[ext](filename) + +class validation_status_element(object): + def __init__(self, *args, **kwargs): + self.attrs = [] + for k, v in kwargs.iteritems(): + setattr(self, k, v) + # attribute names are saved so that the __repr__ method can + # display the subset of attributes the user specified + self.attrs.append(k) + self._obj = None + + def get_obj(self): + if not self._obj: + self._obj = self.file_class(filename=self.filename, uri=self.uri) + return self._obj + + def __repr__(self): + v = [self.__class__.__name__, 'id=%s' % str(id(self))] + v.extend(['%s=%s' % (x, getattr(self, x)) for x in self.attrs]) + return '<%s>' % (' '.join(v),) + + obj = property(get_obj) + +class rcynic_xml_iterator(object): + """ + Iterate over validation_status entries in the XML output from an + rcynic run. 
Yields a tuple for each entry: + + timestamp, generation, status, object + + where URI, status, and timestamp are the corresponding values from + the XML element, OK is a boolean indicating whether validation was + considered succesful, and object is a Python representation of the + object in question. If OK is True, object will be from rcynic's + authenticated output tree; otherwise, object will be from rcynic's + unauthenticated output tree. + + Note that it is possible for the same URI to appear in more than one + validation_status element; in such cases, the succesful case (OK + True) should be the last entry (as rcynic will stop trying once it + gets a good copy), but there may be multiple failures, which might + or might not have different status codes. + """ + + def __init__(self, rcynic_root, xml_file, + authenticated_old_subdir = "authenticated.old", + unauthenticated_subdir = "unauthenticated"): + self.rcynic_root = rcynic_root + self.xml_file = xml_file + self.authenticated_subdir = os.path.join(rcynic_root, 'authenticated') + self.authenticated_old_subdir = os.path.join(rcynic_root, authenticated_old_subdir) + self.unauthenticated_subdir = os.path.join(rcynic_root, unauthenticated_subdir) + + base_uri = "rsync://" + + def uri_to_filename(self, uri): + if uri.startswith(self.base_uri): + return uri[len(self.base_uri):] + else: + raise NotRsyncURI, "Not an rsync URI %r" % uri + + def __iter__(self): + for validation_status in ElementTree(file=self.xml_file).getroot().getiterator("validation_status"): + timestamp = validation_status.get("timestamp") + status = validation_status.get("status") + uri = validation_status.text.strip() + generation = validation_status.get("generation") + + # determine the path to this object + if status == 'object_accepted': + d = self.authenticated_subdir + elif generation == 'backup': + d = self.authenticated_old_subdir + else: + d = self.unauthenticated_subdir + + filename = os.path.join(d, self.uri_to_filename(uri)) + + ext = os.path.splitext(filename)[1] + if ext in file_name_classes: + yield validation_status_element(timestamp = timestamp, generation = generation, + uri=uri, status = status, filename = filename, + file_class = file_name_classes[ext]) + +def label_iterator(xml_file): + """ + Returns an iterator which contains all defined labels from an rcynic XML + output file. Each item is a tuple of the form + (label, kind, description). + """ + + for label in ElementTree(file=xml_file).find("labels"): + yield label.tag, label.get("kind"), label.text.strip() diff --git a/rpki/relaxng.py b/rpki/relaxng.py new file mode 100644 index 00000000..0d8c0d64 --- /dev/null +++ b/rpki/relaxng.py @@ -0,0 +1,2441 @@ +# Automatically generated, do not edit. + +import lxml.etree + +## @var left_right +## Parsed RelaxNG left_right schema +left_right = lxml.etree.RelaxNG(lxml.etree.fromstring(r'''<?xml version="1.0" encoding="UTF-8"?> +<!-- + $Id: left-right-schema.rnc 5753 2014-04-05 19:24:26Z sra $ + + RelaxNG schema for RPKI left-right protocol. + + Copyright (C) 2012- -2014 Dragon Research Labs ("DRL") + Portions copyright (C) 2009- -2011 Internet Systems Consortium ("ISC") + Portions copyright (C) 2007- -2008 American Registry for Internet Numbers ("ARIN") + + Permission to use, copy, modify, and distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notices and this permission notice appear in all copies. 
+ + THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL + WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, + ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR + CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS + OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, + NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION + WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +--> +<grammar ns="http://www.hactrn.net/uris/rpki/left-right-spec/" xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes"> + <!-- Top level PDU --> + <start> + <element name="msg"> + <attribute name="version"> + <data type="positiveInteger"> + <param name="maxInclusive">1</param> + </data> + </attribute> + <choice> + <group> + <attribute name="type"> + <value>query</value> + </attribute> + <zeroOrMore> + <ref name="query_elt"/> + </zeroOrMore> + </group> + <group> + <attribute name="type"> + <value>reply</value> + </attribute> + <zeroOrMore> + <ref name="reply_elt"/> + </zeroOrMore> + </group> + </choice> + </element> + </start> + <!-- PDUs allowed in a query --> + <define name="query_elt" combine="choice"> + <ref name="self_query"/> + </define> + <define name="query_elt" combine="choice"> + <ref name="bsc_query"/> + </define> + <define name="query_elt" combine="choice"> + <ref name="parent_query"/> + </define> + <define name="query_elt" combine="choice"> + <ref name="child_query"/> + </define> + <define name="query_elt" combine="choice"> + <ref name="repository_query"/> + </define> + <define name="query_elt" combine="choice"> + <ref name="list_roa_requests_query"/> + </define> + <define name="query_elt" combine="choice"> + <ref name="list_ghostbuster_requests_query"/> + </define> + <define name="query_elt" combine="choice"> + <ref name="list_ee_certificate_requests_query"/> + </define> + <define name="query_elt" combine="choice"> + <ref name="list_resources_query"/> + </define> + <define name="query_elt" combine="choice"> + <ref name="list_published_objects_query"/> + </define> + <define name="query_elt" combine="choice"> + <ref name="list_received_resources_query"/> + </define> + <!-- PDUs allowed in a reply --> + <define name="reply_elt" combine="choice"> + <ref name="self_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="bsc_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="parent_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="child_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="repository_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="list_resources_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="list_roa_requests_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="list_ghostbuster_requests_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="list_ee_certificate_requests_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="list_published_objects_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="list_received_resources_reply"/> + </define> + <define name="reply_elt" combine="choice"> + <ref name="report_error_reply"/> + </define> + <!-- Tag attributes for bulk operations --> + <define name="tag"> + <optional> + <attribute name="tag"> + 
<data type="token"> + <param name="maxLength">1024</param> + </data> + </attribute> + </optional> + </define> + <!-- + Combinations of action and type attributes used in later definitions. + The same patterns repeat in most of the elements in this protocol. + --> + <define name="ctl_create"> + <attribute name="action"> + <value>create</value> + </attribute> + <ref name="tag"/> + </define> + <define name="ctl_set"> + <attribute name="action"> + <value>set</value> + </attribute> + <ref name="tag"/> + </define> + <define name="ctl_get"> + <attribute name="action"> + <value>get</value> + </attribute> + <ref name="tag"/> + </define> + <define name="ctl_list"> + <attribute name="action"> + <value>list</value> + </attribute> + <ref name="tag"/> + </define> + <define name="ctl_destroy"> + <attribute name="action"> + <value>destroy</value> + </attribute> + <ref name="tag"/> + </define> + <!-- Base64 encoded DER stuff --> + <define name="base64"> + <data type="base64Binary"> + <param name="maxLength">512000</param> + </data> + </define> + <!-- + Base definition for all fields that are really just SQL primary indices + sql_id = xsd:nonNegativeInteger + --> + <!-- + ...except that fields containing SQL primary indicies don't belong + in this protocol, so they're turninging into handles. + Length restriction is a MySQL implementation issue. + Handles are case-insensitive (because SQL is, among other reasons). + --> + <define name="object_handle"> + <data type="string"> + <param name="maxLength">255</param> + <param name="pattern">[\-_A-Za-z0-9]+</param> + </data> + </define> + <!-- URIs --> + <define name="uri"> + <data type="anyURI"> + <param name="maxLength">4096</param> + </data> + </define> + <!-- Name fields imported from up-down protocol --> + <define name="up_down_name"> + <data type="token"> + <param name="maxLength">1024</param> + </data> + </define> + <!-- Resource lists --> + <define name="asn_list"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[\-,0-9]*</param> + </data> + </define> + <define name="ipv4_list"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[\-,0-9/.]*</param> + </data> + </define> + <define name="ipv6_list"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[\-,0-9/:a-fA-F]*</param> + </data> + </define> + <!-- <self/> element --> + <define name="self_bool"> + <optional> + <attribute name="rekey"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="reissue"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="revoke"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="run_now"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="publish_world_now"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="revoke_forgotten"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="clear_replay_protection"> + <value>yes</value> + </attribute> + </optional> + </define> + <define name="self_payload"> + <optional> + <attribute name="use_hsm"> + <choice> + <value>yes</value> + <value>no</value> + </choice> + </attribute> + </optional> + <optional> + <attribute name="crl_interval"> + <data type="positiveInteger"/> + </attribute> + </optional> + <optional> + <attribute name="regen_margin"> + <data type="positiveInteger"/> + </attribute> + </optional> + <optional> + <element name="bpki_cert"> + 
<ref name="base64"/> + </element> + </optional> + <optional> + <element name="bpki_glue"> + <ref name="base64"/> + </element> + </optional> + </define> + <define name="self_handle"> + <attribute name="self_handle"> + <ref name="object_handle"/> + </attribute> + </define> + <define name="self_query" combine="choice"> + <element name="self"> + <ref name="ctl_create"/> + <ref name="self_handle"/> + <ref name="self_bool"/> + <ref name="self_payload"/> + </element> + </define> + <define name="self_reply" combine="choice"> + <element name="self"> + <ref name="ctl_create"/> + <ref name="self_handle"/> + </element> + </define> + <define name="self_query" combine="choice"> + <element name="self"> + <ref name="ctl_set"/> + <ref name="self_handle"/> + <ref name="self_bool"/> + <ref name="self_payload"/> + </element> + </define> + <define name="self_reply" combine="choice"> + <element name="self"> + <ref name="ctl_set"/> + <ref name="self_handle"/> + </element> + </define> + <define name="self_query" combine="choice"> + <element name="self"> + <ref name="ctl_get"/> + <ref name="self_handle"/> + </element> + </define> + <define name="self_reply" combine="choice"> + <element name="self"> + <ref name="ctl_get"/> + <ref name="self_handle"/> + <ref name="self_payload"/> + </element> + </define> + <define name="self_query" combine="choice"> + <element name="self"> + <ref name="ctl_list"/> + </element> + </define> + <define name="self_reply" combine="choice"> + <element name="self"> + <ref name="ctl_list"/> + <ref name="self_handle"/> + <ref name="self_payload"/> + </element> + </define> + <define name="self_query" combine="choice"> + <element name="self"> + <ref name="ctl_destroy"/> + <ref name="self_handle"/> + </element> + </define> + <define name="self_reply" combine="choice"> + <element name="self"> + <ref name="ctl_destroy"/> + <ref name="self_handle"/> + </element> + </define> + <!-- <bsc/> element. Key parameters hardwired for now. 
--> + <define name="bsc_bool"> + <optional> + <attribute name="generate_keypair"> + <value>yes</value> + </attribute> + <optional> + <attribute name="key_type"> + <value>rsa</value> + </attribute> + </optional> + <optional> + <attribute name="hash_alg"> + <value>sha256</value> + </attribute> + </optional> + <optional> + <attribute name="key_length"> + <value>2048</value> + </attribute> + </optional> + </optional> + </define> + <define name="bsc_handle"> + <attribute name="bsc_handle"> + <ref name="object_handle"/> + </attribute> + </define> + <define name="bsc_payload"> + <optional> + <element name="signing_cert"> + <ref name="base64"/> + </element> + </optional> + <optional> + <element name="signing_cert_crl"> + <ref name="base64"/> + </element> + </optional> + </define> + <define name="bsc_readonly"> + <optional> + <element name="pkcs10_request"> + <ref name="base64"/> + </element> + </optional> + </define> + <define name="bsc_query" combine="choice"> + <element name="bsc"> + <ref name="ctl_create"/> + <ref name="self_handle"/> + <ref name="bsc_handle"/> + <ref name="bsc_bool"/> + <ref name="bsc_payload"/> + </element> + </define> + <define name="bsc_reply" combine="choice"> + <element name="bsc"> + <ref name="ctl_create"/> + <ref name="self_handle"/> + <ref name="bsc_handle"/> + <ref name="bsc_readonly"/> + </element> + </define> + <define name="bsc_query" combine="choice"> + <element name="bsc"> + <ref name="ctl_set"/> + <ref name="self_handle"/> + <ref name="bsc_handle"/> + <ref name="bsc_bool"/> + <ref name="bsc_payload"/> + </element> + </define> + <define name="bsc_reply" combine="choice"> + <element name="bsc"> + <ref name="ctl_set"/> + <ref name="self_handle"/> + <ref name="bsc_handle"/> + <ref name="bsc_readonly"/> + </element> + </define> + <define name="bsc_query" combine="choice"> + <element name="bsc"> + <ref name="ctl_get"/> + <ref name="self_handle"/> + <ref name="bsc_handle"/> + </element> + </define> + <define name="bsc_reply" combine="choice"> + <element name="bsc"> + <ref name="ctl_get"/> + <ref name="self_handle"/> + <ref name="bsc_handle"/> + <ref name="bsc_payload"/> + <ref name="bsc_readonly"/> + </element> + </define> + <define name="bsc_query" combine="choice"> + <element name="bsc"> + <ref name="ctl_list"/> + <ref name="self_handle"/> + </element> + </define> + <define name="bsc_reply" combine="choice"> + <element name="bsc"> + <ref name="ctl_list"/> + <ref name="self_handle"/> + <ref name="bsc_handle"/> + <ref name="bsc_payload"/> + <ref name="bsc_readonly"/> + </element> + </define> + <define name="bsc_query" combine="choice"> + <element name="bsc"> + <ref name="ctl_destroy"/> + <ref name="self_handle"/> + <ref name="bsc_handle"/> + </element> + </define> + <define name="bsc_reply" combine="choice"> + <element name="bsc"> + <ref name="ctl_destroy"/> + <ref name="self_handle"/> + <ref name="bsc_handle"/> + </element> + </define> + <!-- <parent/> element --> + <define name="parent_handle"> + <attribute name="parent_handle"> + <ref name="object_handle"/> + </attribute> + </define> + <define name="parent_bool"> + <optional> + <attribute name="rekey"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="reissue"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="revoke"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="revoke_forgotten"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="clear_replay_protection"> + <value>yes</value> + 
</attribute> + </optional> + </define> + <define name="parent_payload"> + <optional> + <attribute name="peer_contact_uri"> + <ref name="uri"/> + </attribute> + </optional> + <optional> + <attribute name="sia_base"> + <ref name="uri"/> + </attribute> + </optional> + <optional> + <ref name="bsc_handle"/> + </optional> + <optional> + <ref name="repository_handle"/> + </optional> + <optional> + <attribute name="sender_name"> + <ref name="up_down_name"/> + </attribute> + </optional> + <optional> + <attribute name="recipient_name"> + <ref name="up_down_name"/> + </attribute> + </optional> + <optional> + <element name="bpki_cms_cert"> + <ref name="base64"/> + </element> + </optional> + <optional> + <element name="bpki_cms_glue"> + <ref name="base64"/> + </element> + </optional> + </define> + <define name="parent_query" combine="choice"> + <element name="parent"> + <ref name="ctl_create"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + <ref name="parent_bool"/> + <ref name="parent_payload"/> + </element> + </define> + <define name="parent_reply" combine="choice"> + <element name="parent"> + <ref name="ctl_create"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + </element> + </define> + <define name="parent_query" combine="choice"> + <element name="parent"> + <ref name="ctl_set"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + <ref name="parent_bool"/> + <ref name="parent_payload"/> + </element> + </define> + <define name="parent_reply" combine="choice"> + <element name="parent"> + <ref name="ctl_set"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + </element> + </define> + <define name="parent_query" combine="choice"> + <element name="parent"> + <ref name="ctl_get"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + </element> + </define> + <define name="parent_reply" combine="choice"> + <element name="parent"> + <ref name="ctl_get"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + <ref name="parent_payload"/> + </element> + </define> + <define name="parent_query" combine="choice"> + <element name="parent"> + <ref name="ctl_list"/> + <ref name="self_handle"/> + </element> + </define> + <define name="parent_reply" combine="choice"> + <element name="parent"> + <ref name="ctl_list"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + <ref name="parent_payload"/> + </element> + </define> + <define name="parent_query" combine="choice"> + <element name="parent"> + <ref name="ctl_destroy"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + </element> + </define> + <define name="parent_reply" combine="choice"> + <element name="parent"> + <ref name="ctl_destroy"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + </element> + </define> + <!-- <child/> element --> + <define name="child_handle"> + <attribute name="child_handle"> + <ref name="object_handle"/> + </attribute> + </define> + <define name="child_bool"> + <optional> + <attribute name="reissue"> + <value>yes</value> + </attribute> + </optional> + <optional> + <attribute name="clear_replay_protection"> + <value>yes</value> + </attribute> + </optional> + </define> + <define name="child_payload"> + <optional> + <ref name="bsc_handle"/> + </optional> + <optional> + <element name="bpki_cert"> + <ref name="base64"/> + </element> + </optional> + <optional> + <element name="bpki_glue"> + <ref name="base64"/> + </element> + </optional> + </define> + <define name="child_query" combine="choice"> + <element name="child"> + <ref name="ctl_create"/> + <ref 
name="self_handle"/> + <ref name="child_handle"/> + <ref name="child_bool"/> + <ref name="child_payload"/> + </element> + </define> + <define name="child_reply" combine="choice"> + <element name="child"> + <ref name="ctl_create"/> + <ref name="self_handle"/> + <ref name="child_handle"/> + </element> + </define> + <define name="child_query" combine="choice"> + <element name="child"> + <ref name="ctl_set"/> + <ref name="self_handle"/> + <ref name="child_handle"/> + <ref name="child_bool"/> + <ref name="child_payload"/> + </element> + </define> + <define name="child_reply" combine="choice"> + <element name="child"> + <ref name="ctl_set"/> + <ref name="self_handle"/> + <ref name="child_handle"/> + </element> + </define> + <define name="child_query" combine="choice"> + <element name="child"> + <ref name="ctl_get"/> + <ref name="self_handle"/> + <ref name="child_handle"/> + </element> + </define> + <define name="child_reply" combine="choice"> + <element name="child"> + <ref name="ctl_get"/> + <ref name="self_handle"/> + <ref name="child_handle"/> + <ref name="child_payload"/> + </element> + </define> + <define name="child_query" combine="choice"> + <element name="child"> + <ref name="ctl_list"/> + <ref name="self_handle"/> + </element> + </define> + <define name="child_reply" combine="choice"> + <element name="child"> + <ref name="ctl_list"/> + <ref name="self_handle"/> + <ref name="child_handle"/> + <ref name="child_payload"/> + </element> + </define> + <define name="child_query" combine="choice"> + <element name="child"> + <ref name="ctl_destroy"/> + <ref name="self_handle"/> + <ref name="child_handle"/> + </element> + </define> + <define name="child_reply" combine="choice"> + <element name="child"> + <ref name="ctl_destroy"/> + <ref name="self_handle"/> + <ref name="child_handle"/> + </element> + </define> + <!-- <repository/> element --> + <define name="repository_handle"> + <attribute name="repository_handle"> + <ref name="object_handle"/> + </attribute> + </define> + <define name="repository_bool"> + <optional> + <attribute name="clear_replay_protection"> + <value>yes</value> + </attribute> + </optional> + </define> + <define name="repository_payload"> + <optional> + <attribute name="peer_contact_uri"> + <ref name="uri"/> + </attribute> + </optional> + <optional> + <ref name="bsc_handle"/> + </optional> + <optional> + <element name="bpki_cert"> + <ref name="base64"/> + </element> + </optional> + <optional> + <element name="bpki_glue"> + <ref name="base64"/> + </element> + </optional> + </define> + <define name="repository_query" combine="choice"> + <element name="repository"> + <ref name="ctl_create"/> + <ref name="self_handle"/> + <ref name="repository_handle"/> + <ref name="repository_bool"/> + <ref name="repository_payload"/> + </element> + </define> + <define name="repository_reply" combine="choice"> + <element name="repository"> + <ref name="ctl_create"/> + <ref name="self_handle"/> + <ref name="repository_handle"/> + </element> + </define> + <define name="repository_query" combine="choice"> + <element name="repository"> + <ref name="ctl_set"/> + <ref name="self_handle"/> + <ref name="repository_handle"/> + <ref name="repository_bool"/> + <ref name="repository_payload"/> + </element> + </define> + <define name="repository_reply" combine="choice"> + <element name="repository"> + <ref name="ctl_set"/> + <ref name="self_handle"/> + <ref name="repository_handle"/> + </element> + </define> + <define name="repository_query" combine="choice"> + <element name="repository"> + <ref 
name="ctl_get"/> + <ref name="self_handle"/> + <ref name="repository_handle"/> + </element> + </define> + <define name="repository_reply" combine="choice"> + <element name="repository"> + <ref name="ctl_get"/> + <ref name="self_handle"/> + <ref name="repository_handle"/> + <ref name="repository_payload"/> + </element> + </define> + <define name="repository_query" combine="choice"> + <element name="repository"> + <ref name="ctl_list"/> + <ref name="self_handle"/> + </element> + </define> + <define name="repository_reply" combine="choice"> + <element name="repository"> + <ref name="ctl_list"/> + <ref name="self_handle"/> + <ref name="repository_handle"/> + <ref name="repository_payload"/> + </element> + </define> + <define name="repository_query" combine="choice"> + <element name="repository"> + <ref name="ctl_destroy"/> + <ref name="self_handle"/> + <ref name="repository_handle"/> + </element> + </define> + <define name="repository_reply" combine="choice"> + <element name="repository"> + <ref name="ctl_destroy"/> + <ref name="self_handle"/> + <ref name="repository_handle"/> + </element> + </define> + <!-- <list_resources/> element --> + <define name="list_resources_query"> + <element name="list_resources"> + <ref name="tag"/> + <ref name="self_handle"/> + <ref name="child_handle"/> + </element> + </define> + <define name="list_resources_reply"> + <element name="list_resources"> + <ref name="tag"/> + <ref name="self_handle"/> + <ref name="child_handle"/> + <attribute name="valid_until"> + <data type="dateTime"> + <param name="pattern">.*Z</param> + </data> + </attribute> + <optional> + <attribute name="asn"> + <ref name="asn_list"/> + </attribute> + </optional> + <optional> + <attribute name="ipv4"> + <ref name="ipv4_list"/> + </attribute> + </optional> + <optional> + <attribute name="ipv6"> + <ref name="ipv6_list"/> + </attribute> + </optional> + </element> + </define> + <!-- <list_roa_requests/> element --> + <define name="list_roa_requests_query"> + <element name="list_roa_requests"> + <ref name="tag"/> + <ref name="self_handle"/> + </element> + </define> + <define name="list_roa_requests_reply"> + <element name="list_roa_requests"> + <ref name="tag"/> + <ref name="self_handle"/> + <attribute name="asn"> + <data type="nonNegativeInteger"/> + </attribute> + <optional> + <attribute name="ipv4"> + <ref name="ipv4_list"/> + </attribute> + </optional> + <optional> + <attribute name="ipv6"> + <ref name="ipv6_list"/> + </attribute> + </optional> + </element> + </define> + <!-- <list_ghostbuster_requests/> element --> + <define name="list_ghostbuster_requests_query"> + <element name="list_ghostbuster_requests"> + <ref name="tag"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + </element> + </define> + <define name="list_ghostbuster_requests_reply"> + <element name="list_ghostbuster_requests"> + <ref name="tag"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + <data type="string"/> + </element> + </define> + <!-- <list_ee_certificate_requests/> element --> + <define name="list_ee_certificate_requests_query"> + <element name="list_ee_certificate_requests"> + <ref name="tag"/> + <ref name="self_handle"/> + </element> + </define> + <define name="list_ee_certificate_requests_reply"> + <element name="list_ee_certificate_requests"> + <ref name="tag"/> + <ref name="self_handle"/> + <attribute name="gski"> + <data type="token"> + <param name="minLength">27</param> + <param name="maxLength">27</param> + </data> + </attribute> + <attribute name="valid_until"> + <data 
type="dateTime"> + <param name="pattern">.*Z</param> + </data> + </attribute> + <optional> + <attribute name="asn"> + <ref name="asn_list"/> + </attribute> + </optional> + <optional> + <attribute name="ipv4"> + <ref name="ipv4_list"/> + </attribute> + </optional> + <optional> + <attribute name="ipv6"> + <ref name="ipv6_list"/> + </attribute> + </optional> + <optional> + <attribute name="cn"> + <data type="string"> + <param name="maxLength">64</param> + <param name="pattern">[\-0-9A-Za-z_ ]+</param> + </data> + </attribute> + </optional> + <optional> + <attribute name="sn"> + <data type="string"> + <param name="maxLength">64</param> + <param name="pattern">[0-9A-Fa-f]+</param> + </data> + </attribute> + </optional> + <optional> + <attribute name="eku"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[.,0-9]+</param> + </data> + </attribute> + </optional> + <element name="pkcs10"> + <ref name="base64"/> + </element> + </element> + </define> + <!-- <list_published_objects/> element --> + <define name="list_published_objects_query"> + <element name="list_published_objects"> + <ref name="tag"/> + <ref name="self_handle"/> + </element> + </define> + <define name="list_published_objects_reply"> + <element name="list_published_objects"> + <ref name="tag"/> + <ref name="self_handle"/> + <attribute name="uri"> + <ref name="uri"/> + </attribute> + <optional> + <attribute name="child_handle"> + <ref name="object_handle"/> + </attribute> + </optional> + <ref name="base64"/> + </element> + </define> + <!-- <list_received_resources/> element --> + <define name="list_received_resources_query"> + <element name="list_received_resources"> + <ref name="tag"/> + <ref name="self_handle"/> + </element> + </define> + <define name="list_received_resources_reply"> + <element name="list_received_resources"> + <ref name="tag"/> + <ref name="self_handle"/> + <ref name="parent_handle"/> + <attribute name="notBefore"> + <data type="dateTime"> + <param name="pattern">.*Z</param> + </data> + </attribute> + <attribute name="notAfter"> + <data type="dateTime"> + <param name="pattern">.*Z</param> + </data> + </attribute> + <attribute name="uri"> + <ref name="uri"/> + </attribute> + <attribute name="sia_uri"> + <ref name="uri"/> + </attribute> + <attribute name="aia_uri"> + <ref name="uri"/> + </attribute> + <optional> + <attribute name="asn"> + <ref name="asn_list"/> + </attribute> + </optional> + <optional> + <attribute name="ipv4"> + <ref name="ipv4_list"/> + </attribute> + </optional> + <optional> + <attribute name="ipv6"> + <ref name="ipv6_list"/> + </attribute> + </optional> + </element> + </define> + <!-- <report_error/> element --> + <define name="error"> + <data type="token"> + <param name="maxLength">1024</param> + </data> + </define> + <define name="report_error_reply"> + <element name="report_error"> + <ref name="tag"/> + <optional> + <ref name="self_handle"/> + </optional> + <attribute name="error_code"> + <ref name="error"/> + </attribute> + <optional> + <data type="string"> + <param name="maxLength">512000</param> + </data> + </optional> + </element> + </define> +</grammar> +<!-- + Local Variables: + indent-tabs-mode: nil + comment-start: "# " + comment-start-skip: "#[ \t]*" + End: +--> +''')) + +## @var up_down +## Parsed RelaxNG up_down schema +up_down = lxml.etree.RelaxNG(lxml.etree.fromstring(r'''<?xml version="1.0" encoding="UTF-8"?> +<!-- + $Id: up-down-schema.rnc 5753 2014-04-05 19:24:26Z sra $ + + RelaxNG schema for the up-down protocol, extracted from RFC 6492. 
+ + Copyright (c) 2012 IETF Trust and the persons identified as authors + of the code. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + * Neither the name of Internet Society, IETF or IETF Trust, nor the + names of specific contributors, may be used to endorse or promote + products derived from this software without specific prior written + permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN + ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. +--> +<grammar ns="http://www.apnic.net/specs/rescerts/up-down/" xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes"> + <define name="resource_set_as"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[\-,0-9]*</param> + </data> + </define> + <define name="resource_set_ip4"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[\-,/.0-9]*</param> + </data> + </define> + <define name="resource_set_ip6"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[\-,/:0-9a-fA-F]*</param> + </data> + </define> + <define name="class_name"> + <data type="token"> + <param name="minLength">1</param> + <param name="maxLength">1024</param> + </data> + </define> + <define name="ski"> + <data type="token"> + <param name="minLength">27</param> + <param name="maxLength">1024</param> + </data> + </define> + <define name="label"> + <data type="token"> + <param name="minLength">1</param> + <param name="maxLength">1024</param> + </data> + </define> + <define name="cert_url"> + <data type="string"> + <param name="minLength">10</param> + <param name="maxLength">4096</param> + </data> + </define> + <define name="base64_binary"> + <data type="base64Binary"> + <param name="minLength">4</param> + <param name="maxLength">512000</param> + </data> + </define> + <start> + <element name="message"> + <attribute name="version"> + <data type="positiveInteger"> + <param name="maxInclusive">1</param> + </data> + </attribute> + <attribute name="sender"> + <ref name="label"/> + </attribute> + <attribute name="recipient"> + <ref name="label"/> + </attribute> + <ref name="payload"/> + </element> + </start> + <define name="payload" combine="choice"> + <attribute name="type"> + <value>list</value> + </attribute> + <ref name="list_request"/> + </define> + <define name="payload" combine="choice"> 
+ <attribute name="type"> + <value>list_response</value> + </attribute> + <ref name="list_response"/> + </define> + <define name="payload" combine="choice"> + <attribute name="type"> + <value>issue</value> + </attribute> + <ref name="issue_request"/> + </define> + <define name="payload" combine="choice"> + <attribute name="type"> + <value>issue_response</value> + </attribute> + <ref name="issue_response"/> + </define> + <define name="payload" combine="choice"> + <attribute name="type"> + <value>revoke</value> + </attribute> + <ref name="revoke_request"/> + </define> + <define name="payload" combine="choice"> + <attribute name="type"> + <value>revoke_response</value> + </attribute> + <ref name="revoke_response"/> + </define> + <define name="payload" combine="choice"> + <attribute name="type"> + <value>error_response</value> + </attribute> + <ref name="error_response"/> + </define> + <define name="list_request"> + <empty/> + </define> + <define name="list_response"> + <zeroOrMore> + <ref name="class"/> + </zeroOrMore> + </define> + <define name="class"> + <element name="class"> + <attribute name="class_name"> + <ref name="class_name"/> + </attribute> + <attribute name="cert_url"> + <ref name="cert_url"/> + </attribute> + <attribute name="resource_set_as"> + <ref name="resource_set_as"/> + </attribute> + <attribute name="resource_set_ipv4"> + <ref name="resource_set_ip4"/> + </attribute> + <attribute name="resource_set_ipv6"> + <ref name="resource_set_ip6"/> + </attribute> + <attribute name="resource_set_notafter"> + <data type="dateTime"/> + </attribute> + <optional> + <attribute name="suggested_sia_head"> + <data type="anyURI"> + <param name="maxLength">1024</param> + <param name="pattern">rsync://.+</param> + </data> + </attribute> + </optional> + <zeroOrMore> + <element name="certificate"> + <attribute name="cert_url"> + <ref name="cert_url"/> + </attribute> + <optional> + <attribute name="req_resource_set_as"> + <ref name="resource_set_as"/> + </attribute> + </optional> + <optional> + <attribute name="req_resource_set_ipv4"> + <ref name="resource_set_ip4"/> + </attribute> + </optional> + <optional> + <attribute name="req_resource_set_ipv6"> + <ref name="resource_set_ip6"/> + </attribute> + </optional> + <ref name="base64_binary"/> + </element> + </zeroOrMore> + <element name="issuer"> + <ref name="base64_binary"/> + </element> + </element> + </define> + <define name="issue_request"> + <element name="request"> + <attribute name="class_name"> + <ref name="class_name"/> + </attribute> + <optional> + <attribute name="req_resource_set_as"> + <ref name="resource_set_as"/> + </attribute> + </optional> + <optional> + <attribute name="req_resource_set_ipv4"> + <ref name="resource_set_ip4"/> + </attribute> + </optional> + <optional> + <attribute name="req_resource_set_ipv6"> + <ref name="resource_set_ip6"/> + </attribute> + </optional> + <ref name="base64_binary"/> + </element> + </define> + <define name="issue_response"> + <ref name="class"/> + </define> + <define name="revoke_request"> + <ref name="revocation"/> + </define> + <define name="revoke_response"> + <ref name="revocation"/> + </define> + <define name="revocation"> + <element name="key"> + <attribute name="class_name"> + <ref name="class_name"/> + </attribute> + <attribute name="ski"> + <ref name="ski"/> + </attribute> + </element> + </define> + <define name="error_response"> + <element name="status"> + <data type="positiveInteger"> + <param name="maxInclusive">9999</param> + </data> + </element> + <zeroOrMore> + <element 
name="description"> + <attribute name="xml:lang"> + <data type="language"/> + </attribute> + <data type="string"> + <param name="maxLength">1024</param> + </data> + </element> + </zeroOrMore> + </define> +</grammar> +<!-- + Local Variables: + indent-tabs-mode: nil + comment-start: "# " + comment-start-skip: "#[ \t]*" + End: +--> +''')) + +## @var publication +## Parsed RelaxNG publication schema +publication = lxml.etree.RelaxNG(lxml.etree.fromstring(r'''<?xml version="1.0" encoding="UTF-8"?> +<!-- + $Id: publication-schema.rnc 5753 2014-04-05 19:24:26Z sra $ + + RelaxNG schema for RPKI publication protocol. + + Copyright (C) 2012- -2014 Dragon Research Labs ("DRL") + Portions copyright (C) 2009- -2011 Internet Systems Consortium ("ISC") + Portions copyright (C) 2007- -2008 American Registry for Internet Numbers ("ARIN") + + Permission to use, copy, modify, and distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notices and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL + WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, + ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR + CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS + OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, + NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION + WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +--> +<grammar ns="http://www.hactrn.net/uris/rpki/publication-spec/" xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes"> + <!-- Top level PDU --> + <start> + <element name="msg"> + <attribute name="version"> + <data type="positiveInteger"> + <param name="maxInclusive">1</param> + </data> + </attribute> + <choice> + <group> + <attribute name="type"> + <value>query</value> + </attribute> + <zeroOrMore> + <ref name="query_elt"/> + </zeroOrMore> + </group> + <group> + <attribute name="type"> + <value>reply</value> + </attribute> + <zeroOrMore> + <ref name="reply_elt"/> + </zeroOrMore> + </group> + </choice> + </element> + </start> + <!-- PDUs allowed in a query --> + <define name="query_elt"> + <choice> + <ref name="config_query"/> + <ref name="client_query"/> + <ref name="certificate_query"/> + <ref name="crl_query"/> + <ref name="manifest_query"/> + <ref name="roa_query"/> + <ref name="ghostbuster_query"/> + </choice> + </define> + <!-- PDUs allowed in a reply --> + <define name="reply_elt"> + <choice> + <ref name="config_reply"/> + <ref name="client_reply"/> + <ref name="certificate_reply"/> + <ref name="crl_reply"/> + <ref name="manifest_reply"/> + <ref name="roa_reply"/> + <ref name="ghostbuster_reply"/> + <ref name="report_error_reply"/> + </choice> + </define> + <!-- Tag attributes for bulk operations --> + <define name="tag"> + <attribute name="tag"> + <data type="token"> + <param name="maxLength">1024</param> + </data> + </attribute> + </define> + <!-- + Base64 encoded DER stuff + base64 = xsd:base64Binary { maxLength="512000" } + + Sadly, it turns out that CRLs can in fact get longer than this for an active CA. + Remove length limit for now, think about whether to put it back later. 
+ --> + <define name="base64"> + <data type="base64Binary"/> + </define> + <!-- Publication URLs --> + <define name="uri_t"> + <data type="anyURI"> + <param name="maxLength">4096</param> + </data> + </define> + <define name="uri"> + <attribute name="uri"> + <ref name="uri_t"/> + </attribute> + </define> + <!-- + Handles on remote objects (replaces passing raw SQL IDs). NB: + Unlike the up-down protocol, handles in this protocol allow "/" as a + hierarchy delimiter. + --> + <define name="object_handle"> + <data type="string"> + <param name="maxLength">255</param> + <param name="pattern">[\-_A-Za-z0-9/]+</param> + </data> + </define> + <!-- + <config/> element (use restricted to repository operator) + config_handle attribute, create, list, and destroy commands omitted deliberately, see code for details + --> + <define name="config_payload"> + <optional> + <element name="bpki_crl"> + <ref name="base64"/> + </element> + </optional> + </define> + <define name="config_query" combine="choice"> + <element name="config"> + <attribute name="action"> + <value>set</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="config_payload"/> + </element> + </define> + <define name="config_reply" combine="choice"> + <element name="config"> + <attribute name="action"> + <value>set</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + </element> + </define> + <define name="config_query" combine="choice"> + <element name="config"> + <attribute name="action"> + <value>get</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + </element> + </define> + <define name="config_reply" combine="choice"> + <element name="config"> + <attribute name="action"> + <value>get</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="config_payload"/> + </element> + </define> + <!-- <client/> element (use restricted to repository operator) --> + <define name="client_handle"> + <attribute name="client_handle"> + <ref name="object_handle"/> + </attribute> + </define> + <define name="client_bool"> + <optional> + <attribute name="clear_replay_protection"> + <value>yes</value> + </attribute> + </optional> + </define> + <define name="client_payload"> + <optional> + <attribute name="base_uri"> + <ref name="uri_t"/> + </attribute> + </optional> + <optional> + <element name="bpki_cert"> + <ref name="base64"/> + </element> + </optional> + <optional> + <element name="bpki_glue"> + <ref name="base64"/> + </element> + </optional> + </define> + <define name="client_query" combine="choice"> + <element name="client"> + <attribute name="action"> + <value>create</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="client_handle"/> + <ref name="client_bool"/> + <ref name="client_payload"/> + </element> + </define> + <define name="client_reply" combine="choice"> + <element name="client"> + <attribute name="action"> + <value>create</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="client_handle"/> + </element> + </define> + <define name="client_query" combine="choice"> + <element name="client"> + <attribute name="action"> + <value>set</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="client_handle"/> + <ref name="client_bool"/> + <ref name="client_payload"/> + </element> + </define> + <define name="client_reply" combine="choice"> + <element name="client"> + <attribute name="action"> + <value>set</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + 
<ref name="client_handle"/> + </element> + </define> + <define name="client_query" combine="choice"> + <element name="client"> + <attribute name="action"> + <value>get</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="client_handle"/> + </element> + </define> + <define name="client_reply" combine="choice"> + <element name="client"> + <attribute name="action"> + <value>get</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="client_handle"/> + <ref name="client_payload"/> + </element> + </define> + <define name="client_query" combine="choice"> + <element name="client"> + <attribute name="action"> + <value>list</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + </element> + </define> + <define name="client_reply" combine="choice"> + <element name="client"> + <attribute name="action"> + <value>list</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="client_handle"/> + <ref name="client_payload"/> + </element> + </define> + <define name="client_query" combine="choice"> + <element name="client"> + <attribute name="action"> + <value>destroy</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="client_handle"/> + </element> + </define> + <define name="client_reply" combine="choice"> + <element name="client"> + <attribute name="action"> + <value>destroy</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="client_handle"/> + </element> + </define> + <!-- <certificate/> element --> + <define name="certificate_query" combine="choice"> + <element name="certificate"> + <attribute name="action"> + <value>publish</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + <ref name="base64"/> + </element> + </define> + <define name="certificate_reply" combine="choice"> + <element name="certificate"> + <attribute name="action"> + <value>publish</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <define name="certificate_query" combine="choice"> + <element name="certificate"> + <attribute name="action"> + <value>withdraw</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <define name="certificate_reply" combine="choice"> + <element name="certificate"> + <attribute name="action"> + <value>withdraw</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <!-- <crl/> element --> + <define name="crl_query" combine="choice"> + <element name="crl"> + <attribute name="action"> + <value>publish</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + <ref name="base64"/> + </element> + </define> + <define name="crl_reply" combine="choice"> + <element name="crl"> + <attribute name="action"> + <value>publish</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <define name="crl_query" combine="choice"> + <element name="crl"> + <attribute name="action"> + <value>withdraw</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <define name="crl_reply" combine="choice"> + <element name="crl"> + <attribute name="action"> + <value>withdraw</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <!-- <manifest/> 
element --> + <define name="manifest_query" combine="choice"> + <element name="manifest"> + <attribute name="action"> + <value>publish</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + <ref name="base64"/> + </element> + </define> + <define name="manifest_reply" combine="choice"> + <element name="manifest"> + <attribute name="action"> + <value>publish</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <define name="manifest_query" combine="choice"> + <element name="manifest"> + <attribute name="action"> + <value>withdraw</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <define name="manifest_reply" combine="choice"> + <element name="manifest"> + <attribute name="action"> + <value>withdraw</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <!-- <roa/> element --> + <define name="roa_query" combine="choice"> + <element name="roa"> + <attribute name="action"> + <value>publish</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + <ref name="base64"/> + </element> + </define> + <define name="roa_reply" combine="choice"> + <element name="roa"> + <attribute name="action"> + <value>publish</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <define name="roa_query" combine="choice"> + <element name="roa"> + <attribute name="action"> + <value>withdraw</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <define name="roa_reply" combine="choice"> + <element name="roa"> + <attribute name="action"> + <value>withdraw</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <!-- <ghostbuster/> element --> + <define name="ghostbuster_query" combine="choice"> + <element name="ghostbuster"> + <attribute name="action"> + <value>publish</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + <ref name="base64"/> + </element> + </define> + <define name="ghostbuster_reply" combine="choice"> + <element name="ghostbuster"> + <attribute name="action"> + <value>publish</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <define name="ghostbuster_query" combine="choice"> + <element name="ghostbuster"> + <attribute name="action"> + <value>withdraw</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <define name="ghostbuster_reply" combine="choice"> + <element name="ghostbuster"> + <attribute name="action"> + <value>withdraw</value> + </attribute> + <optional> + <ref name="tag"/> + </optional> + <ref name="uri"/> + </element> + </define> + <!-- <report_error/> element --> + <define name="error"> + <data type="token"> + <param name="maxLength">1024</param> + </data> + </define> + <define name="report_error_reply"> + <element name="report_error"> + <optional> + <ref name="tag"/> + </optional> + <attribute name="error_code"> + <ref name="error"/> + </attribute> + <optional> + <data type="string"> + <param name="maxLength">512000</param> + </data> + </optional> + </element> + </define> +</grammar> +<!-- + Local Variables: + indent-tabs-mode: nil + comment-start: "# " + comment-start-skip: "#[ \t]*" 
+ End: +--> +''')) + +## @var myrpki +## Parsed RelaxNG myrpki schema +myrpki = lxml.etree.RelaxNG(lxml.etree.fromstring(r'''<?xml version="1.0" encoding="UTF-8"?> +<!-- + $Id: myrpki.rnc 5753 2014-04-05 19:24:26Z sra $ + + RelaxNG schema for MyRPKI XML messages. + + This message protocol is on its way out, as we're in the process of + moving on from the user interface model that produced it, but even + after we finish replacing it we'll still need the schema for a while + to validate old messages when upgrading. + + libxml2 (including xmllint) only groks the XML syntax of RelaxNG, so + run the compact syntax through trang to get XML syntax. + + Copyright (C) 2009-2011 Internet Systems Consortium ("ISC") + + Permission to use, copy, modify, and distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE + OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. +--> +<grammar ns="http://www.hactrn.net/uris/rpki/myrpki/" xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes"> + <define name="version"> + <value>2</value> + </define> + <define name="base64"> + <data type="base64Binary"> + <param name="maxLength">512000</param> + </data> + </define> + <define name="object_handle"> + <data type="string"> + <param name="maxLength">255</param> + <param name="pattern">[\-_A-Za-z0-9]+</param> + </data> + </define> + <define name="pubd_handle"> + <data type="string"> + <param name="maxLength">255</param> + <param name="pattern">[\-_A-Za-z0-9/]+</param> + </data> + </define> + <define name="uri"> + <data type="anyURI"> + <param name="maxLength">4096</param> + </data> + </define> + <define name="asn"> + <data type="positiveInteger"/> + </define> + <define name="asn_list"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[\-,0-9]+</param> + </data> + </define> + <define name="ipv4_list"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[\-,0-9/.]+</param> + </data> + </define> + <define name="ipv6_list"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[\-,0-9/:a-fA-F]+</param> + </data> + </define> + <define name="timestamp"> + <data type="dateTime"> + <param name="pattern">.*Z</param> + </data> + </define> + <!-- + Message formate used between configure_resources and + configure_daemons. 
+ --> + <start combine="choice"> + <element name="myrpki"> + <attribute name="version"> + <ref name="version"/> + </attribute> + <attribute name="handle"> + <ref name="object_handle"/> + </attribute> + <optional> + <attribute name="service_uri"> + <ref name="uri"/> + </attribute> + </optional> + <zeroOrMore> + <element name="roa_request"> + <attribute name="asn"> + <ref name="asn"/> + </attribute> + <attribute name="v4"> + <ref name="ipv4_list"/> + </attribute> + <attribute name="v6"> + <ref name="ipv6_list"/> + </attribute> + </element> + </zeroOrMore> + <zeroOrMore> + <element name="child"> + <attribute name="handle"> + <ref name="object_handle"/> + </attribute> + <attribute name="valid_until"> + <ref name="timestamp"/> + </attribute> + <optional> + <attribute name="asns"> + <ref name="asn_list"/> + </attribute> + </optional> + <optional> + <attribute name="v4"> + <ref name="ipv4_list"/> + </attribute> + </optional> + <optional> + <attribute name="v6"> + <ref name="ipv6_list"/> + </attribute> + </optional> + <optional> + <element name="bpki_certificate"> + <ref name="base64"/> + </element> + </optional> + </element> + </zeroOrMore> + <zeroOrMore> + <element name="parent"> + <attribute name="handle"> + <ref name="object_handle"/> + </attribute> + <optional> + <attribute name="service_uri"> + <ref name="uri"/> + </attribute> + </optional> + <optional> + <attribute name="myhandle"> + <ref name="object_handle"/> + </attribute> + </optional> + <optional> + <attribute name="sia_base"> + <ref name="uri"/> + </attribute> + </optional> + <optional> + <element name="bpki_cms_certificate"> + <ref name="base64"/> + </element> + </optional> + </element> + </zeroOrMore> + <zeroOrMore> + <element name="repository"> + <attribute name="handle"> + <ref name="object_handle"/> + </attribute> + <optional> + <attribute name="service_uri"> + <ref name="uri"/> + </attribute> + </optional> + <optional> + <element name="bpki_certificate"> + <ref name="base64"/> + </element> + </optional> + </element> + </zeroOrMore> + <optional> + <element name="bpki_ca_certificate"> + <ref name="base64"/> + </element> + </optional> + <optional> + <element name="bpki_crl"> + <ref name="base64"/> + </element> + </optional> + <optional> + <element name="bpki_bsc_certificate"> + <ref name="base64"/> + </element> + </optional> + <optional> + <element name="bpki_bsc_pkcs10"> + <ref name="base64"/> + </element> + </optional> + </element> + </start> + <!-- Format of an identity.xml file. --> + <start combine="choice"> + <element name="identity"> + <attribute name="version"> + <ref name="version"/> + </attribute> + <attribute name="handle"> + <ref name="object_handle"/> + </attribute> + <element name="bpki_ta"> + <ref name="base64"/> + </element> + </element> + </start> + <!-- + Format of <authorization/> element used in referrals. The Base64 + text is a <referral/> (q. v.) element signed with CMS. + --> + <define name="authorization"> + <element name="authorization"> + <attribute name="referrer"> + <ref name="pubd_handle"/> + </attribute> + <ref name="base64"/> + </element> + </define> + <!-- Format of <contact_info/> element used in referrals. --> + <define name="contact_info"> + <element name="contact_info"> + <optional> + <attribute name="uri"> + <ref name="uri"/> + </attribute> + </optional> + <data type="string"/> + </element> + </define> + <!-- Variant payload portion of a <repository/> element. 
--> + <define name="repository_payload"> + <choice> + <attribute name="type"> + <value>none</value> + </attribute> + <attribute name="type"> + <value>offer</value> + </attribute> + <group> + <attribute name="type"> + <value>referral</value> + </attribute> + <ref name="authorization"/> + <ref name="contact_info"/> + </group> + </choice> + </define> + <!-- <parent/> element (response from configure_child). --> + <start combine="choice"> + <element name="parent"> + <attribute name="version"> + <ref name="version"/> + </attribute> + <optional> + <attribute name="valid_until"> + <ref name="timestamp"/> + </attribute> + </optional> + <optional> + <attribute name="service_uri"> + <ref name="uri"/> + </attribute> + </optional> + <attribute name="child_handle"> + <ref name="object_handle"/> + </attribute> + <attribute name="parent_handle"> + <ref name="object_handle"/> + </attribute> + <element name="bpki_resource_ta"> + <ref name="base64"/> + </element> + <element name="bpki_child_ta"> + <ref name="base64"/> + </element> + <optional> + <element name="repository"> + <ref name="repository_payload"/> + </element> + </optional> + </element> + </start> + <!-- + <repository/> element, types offer and referral + (input to configure_publication_client). + --> + <start combine="choice"> + <element name="repository"> + <attribute name="version"> + <ref name="version"/> + </attribute> + <attribute name="handle"> + <ref name="object_handle"/> + </attribute> + <attribute name="parent_handle"> + <ref name="object_handle"/> + </attribute> + <ref name="repository_payload"/> + <element name="bpki_client_ta"> + <ref name="base64"/> + </element> + </element> + </start> + <!-- + <repository/> element, confirmation type (output of + configure_publication_client). + --> + <start combine="choice"> + <element name="repository"> + <attribute name="version"> + <ref name="version"/> + </attribute> + <attribute name="type"> + <value>confirmed</value> + </attribute> + <attribute name="parent_handle"> + <ref name="object_handle"/> + </attribute> + <attribute name="client_handle"> + <ref name="pubd_handle"/> + </attribute> + <attribute name="service_uri"> + <ref name="uri"/> + </attribute> + <attribute name="sia_base"> + <ref name="uri"/> + </attribute> + <element name="bpki_server_ta"> + <ref name="base64"/> + </element> + <element name="bpki_client_ta"> + <ref name="base64"/> + </element> + <optional> + <ref name="authorization"/> + </optional> + <optional> + <ref name="contact_info"/> + </optional> + </element> + </start> + <!-- + <referral/> element. This is the entirety of a separate message + which is signed with CMS then included ase the Base64 content of an + <authorization/> element in the main message. + --> + <start combine="choice"> + <element name="referral"> + <attribute name="version"> + <ref name="version"/> + </attribute> + <attribute name="authorized_sia_base"> + <ref name="uri"/> + </attribute> + <ref name="base64"/> + </element> + </start> +</grammar> +<!-- + Local Variables: + indent-tabs-mode: nil + comment-start: "# " + comment-start-skip: "#[ \t]*" + End: +--> +''')) + +## @var router_certificate +## Parsed RelaxNG router_certificate schema +router_certificate = lxml.etree.RelaxNG(lxml.etree.fromstring(r'''<?xml version="1.0" encoding="UTF-8"?> +<!-- + $Id: router-certificate-schema.rnc 5753 2014-04-05 19:24:26Z sra $ + + RelaxNG schema for BGPSEC router certificate interchange format. 
+ + At least for now, this is a trivial encapsulation of a PKCS #10 + request, a set (usually containing exactly one member) of autonomous + system numbers, and a router-id. Be warned that this could change + radically by the time we have any real operational understanding of + how these things will be used, this is just our current best guess + to let us move forward on initial coding. + + Copyright (C) 2014 Dragon Research Labs ("DRL") + + Permission to use, copy, modify, and distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE + OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. +--> +<grammar ns="http://www.hactrn.net/uris/rpki/router-certificate/" xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes"> + <define name="version"> + <value>1</value> + </define> + <define name="base64"> + <data type="base64Binary"> + <param name="maxLength">512000</param> + </data> + </define> + <define name="router_id"> + <data type="unsignedInt"/> + </define> + <define name="asn_list"> + <data type="string"> + <param name="maxLength">512000</param> + <param name="pattern">[0-9][\-,0-9]*</param> + </data> + </define> + <define name="timestamp"> + <data type="dateTime"> + <param name="pattern">.*Z</param> + </data> + </define> + <!-- Core payload used in this schema. --> + <define name="payload"> + <attribute name="router_id"> + <ref name="router_id"/> + </attribute> + <attribute name="asn"> + <ref name="asn_list"/> + </attribute> + <optional> + <attribute name="valid_until"> + <ref name="timestamp"/> + </attribute> + </optional> + <ref name="base64"/> + </define> + <!-- + We allow two forms, one with a wrapper to allow multiple requests in + a single file, one without for brevity; the version attribute goes + in the outermost element in either case. 
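As the comment says, the schema accepts either a bare <router_certificate_request/> or a <router_certificate_requests/> wrapper, with the version attribute on whichever element is outermost. A hedged sketch of the wrapper form, built with lxml and checked against the router_certificate validator this file constructs (the Base64 text is a placeholder, not a real PKCS #10):

    import lxml.etree

    RC_NS = "http://www.hactrn.net/uris/rpki/router-certificate/"
    reqs = lxml.etree.Element("{%s}router_certificate_requests" % RC_NS, version = "1")
    req = lxml.etree.SubElement(reqs, "{%s}router_certificate_request" % RC_NS,
                                router_id = "666", asn = "64496")
    req.text = "cGxhY2Vob2xkZXI="             # placeholder Base64 payload
    router_certificate.assertValid(reqs)      # raises DocumentInvalid on failure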
+ --> + <start combine="choice"> + <element name="router_certificate_request"> + <attribute name="version"> + <ref name="version"/> + </attribute> + <ref name="payload"/> + </element> + </start> + <start combine="choice"> + <element name="router_certificate_requests"> + <attribute name="version"> + <ref name="version"/> + </attribute> + <zeroOrMore> + <element name="router_certificate_request"> + <ref name="payload"/> + </element> + </zeroOrMore> + </element> + </start> +</grammar> +<!-- + Local Variables: + indent-tabs-mode: nil + comment-start: "# " + comment-start-skip: "#[ \t]*" + End: +--> +''')) + diff --git a/rpki/resource_set.py b/rpki/resource_set.py new file mode 100644 index 00000000..2ec19cab --- /dev/null +++ b/rpki/resource_set.py @@ -0,0 +1,1148 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Classes dealing with sets of resources. + +The basic mechanics of a resource set are the same for any of the +resources we handle (ASNs, IPv4 addresses, or IPv6 addresses), so we +can provide the same operations on any of them, even though the +underlying details vary. + +We also provide some basic set operations (union, intersection, etc). +""" + +import re +import math +import rpki.exceptions +import rpki.POW + +## @var inherit_token +# Token used to indicate inheritance in read and print syntax. + +inherit_token = "<inherit>" + +re_asn_range = re.compile("^([0-9]+)-([0-9]+)$") +re_address_range = re.compile("^([0-9:.a-fA-F]+)-([0-9:.a-fA-F]+)$") +re_prefix_with_maxlen = re.compile("^([0-9:.a-fA-F]+)/([0-9]+)-([0-9]+)$") +re_prefix = re.compile("^([0-9:.a-fA-F]+)/([0-9]+)$") + +class resource_range(object): + """ + Generic resource range type. Assumes underlying type is some kind + of integer. + + This is a virtual class. You probably don't want to use this type + directly. + """ + + def __init__(self, range_min, range_max): + assert range_min.__class__ is range_max.__class__, \ + "Type mismatch, %r doesn't match %r" % (range_min.__class__, range_max.__class__) + assert range_min <= range_max, "Mis-ordered range: %s before %s" % (range_min, range_max) + self.min = range_min + self.max = range_max + + def __cmp__(self, other): + assert self.__class__ is other.__class__, \ + "Type mismatch, comparing %r with %r" % (self.__class__, other.__class__) + return cmp(self.min, other.min) or cmp(self.max, other.max) + +class resource_range_as(resource_range): + """ + Range of Autonomous System Numbers. + + Denotes a single ASN by a range whose min and max values are + identical. + """ + + ## @var datum_type + # Type of underlying data (min and max). 
+ + datum_type = long + + def __init__(self, range_min, range_max): + resource_range.__init__(self, + long(range_min) if isinstance(range_min, int) else range_min, + long(range_max) if isinstance(range_max, int) else range_max) + + def __str__(self): + """ + Convert a resource_range_as to string format. + """ + if self.min == self.max: + return str(self.min) + else: + return str(self.min) + "-" + str(self.max) + + @classmethod + def parse_str(cls, x): + """ + Parse ASN resource range from text (eg, XML attributes). + """ + r = re_asn_range.match(x) + if r: + return cls(long(r.group(1)), long(r.group(2))) + else: + return cls(long(x), long(x)) + + @classmethod + def from_strings(cls, a, b = None): + """ + Construct ASN range from strings. + """ + if b is None: + b = a + return cls(long(a), long(b)) + +class resource_range_ip(resource_range): + """ + Range of (generic) IP addresses. + + Prefixes are converted to ranges on input, and ranges that can be + represented as prefixes are written as prefixes on output. + + This is a virtual class. You probably don't want to use it + directly. + """ + + ## @var datum_type + # Type of underlying data (min and max). + + datum_type = rpki.POW.IPAddress + + def prefixlen(self): + """ + Determine whether a resource_range_ip can be expressed as a + prefix. Returns prefix length if it can, otherwise raises + MustBePrefix exception. + """ + mask = self.min ^ self.max + if self.min & mask != 0: + raise rpki.exceptions.MustBePrefix + prefixlen = self.min.bits + while mask & 1: + prefixlen -= 1 + mask >>= 1 + if mask: + raise rpki.exceptions.MustBePrefix + return prefixlen + + @property + def can_be_prefix(self): + """ + Boolean property indicating whether this range can be expressed as + a prefix. + + This just calls .prefixlen() to do the work, so that we can keep + the logic in one place. This property is useful primarily in + context where catching an exception isn't practical. + """ + try: + self.prefixlen() + return True + except rpki.exceptions.MustBePrefix: + return False + + def __str__(self): + """ + Convert a resource_range_ip to string format. + """ + try: + return str(self.min) + "/" + str(self.prefixlen()) + except rpki.exceptions.MustBePrefix: + return str(self.min) + "-" + str(self.max) + + @classmethod + def parse_str(cls, x): + """ + Parse IP address range or prefix from text (eg, XML attributes). + """ + r = re_address_range.match(x) + if r: + return cls.from_strings(r.group(1), r.group(2)) + r = re_prefix.match(x) + if r: + a = rpki.POW.IPAddress(r.group(1)) + if cls is resource_range_ip and a.version == 4: + cls = resource_range_ipv4 + if cls is resource_range_ip and a.version == 6: + cls = resource_range_ipv6 + return cls.make_prefix(a, int(r.group(2))) + raise rpki.exceptions.BadIPResource, 'Bad IP resource "%s"' % (x) + + @classmethod + def make_prefix(cls, prefix, prefixlen): + """ + Construct a resource range corresponding to a prefix. + """ + assert isinstance(prefix, rpki.POW.IPAddress) and isinstance(prefixlen, (int, long)) + assert prefixlen >= 0 and prefixlen <= prefix.bits, "Nonsensical prefix length: %s" % prefixlen + mask = (1 << (prefix.bits - prefixlen)) - 1 + assert (prefix & mask) == 0, "Resource not in canonical form: %s/%s" % (prefix, prefixlen) + return cls(prefix, rpki.POW.IPAddress(prefix | mask)) + + def chop_into_prefixes(self, result): + """ + Chop up a resource_range_ip into ranges that can be represented as + prefixes. 
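An illustrative sketch of the effect, not part of the original docstring (assumes rpki.POW is importable): a range that is not itself a prefix is decomposed, starting from its low end, into the largest aligned prefixes that fit.

    prefixes = []
    resource_range_ipv4.from_strings("10.0.0.1", "10.0.0.6").chop_into_prefixes(prefixes)
    # prefixes now holds the ranges for 10.0.0.1/32, 10.0.0.2/31,
    # 10.0.0.4/31 and 10.0.0.6/32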
+ """ + try: + self.prefixlen() + result.append(self) + except rpki.exceptions.MustBePrefix: + range_min = self.min + range_max = self.max + while range_max >= range_min: + bits = int(math.log(long(range_max - range_min + 1), 2)) + while True: + mask = ~(~0 << bits) + assert range_min + mask <= range_max + if range_min & mask == 0: + break + assert bits > 0 + bits -= 1 + result.append(self.make_prefix(range_min, range_min.bits - bits)) + range_min = range_min + mask + 1 + + @classmethod + def from_strings(cls, a, b = None): + """ + Construct IP address range from strings. + """ + if b is None: + b = a + a = rpki.POW.IPAddress(a) + b = rpki.POW.IPAddress(b) + if a.version != b.version: + raise TypeError + if cls is resource_range_ip: + if a.version == 4: + return resource_range_ipv4(a, b) + if a.version == 6: + return resource_range_ipv6(a, b) + elif a.version == cls.version: + return cls(a, b) + else: + raise TypeError + +class resource_range_ipv4(resource_range_ip): + """ + Range of IPv4 addresses. + """ + + version = 4 + +class resource_range_ipv6(resource_range_ip): + """ + Range of IPv6 addresses. + """ + + version = 6 + +def _rsplit(rset, that): + """ + Utility function to split a resource range into two resource ranges. + """ + + this = rset.pop(0) + + assert type(this) is type(that), "type(this) [%r] is not type(that) [%r]" % (type(this), type(that)) + + assert type(this.min) is type(that.min), "type(this.min) [%r] is not type(that.min) [%r]" % (type(this.min), type(that.min)) + assert type(this.min) is type(this.max), "type(this.min) [%r] is not type(this.max) [%r]" % (type(this.min), type(this.max)) + assert type(that.min) is type(that.max), "type(that.min) [%r] is not type(that.max) [%r]" % (type(that.min), type(that.max)) + + if this.min < that.min: + rset.insert(0, type(this)(this.min, type(that.min)(that.min - 1))) + rset.insert(1, type(this)(that.min, this.max)) + + else: + assert this.max > that.max + rset.insert(0, type(this)(this.min, that.max)) + rset.insert(1, type(this)(type(that.max)(that.max + 1), this.max)) + +class resource_set(list): + """ + Generic resource set, a list subclass containing resource ranges. + + This is a virtual class. You probably don't want to use it + directly. + """ + + ## @var inherit + # Boolean indicating whether this resource_set uses RFC 3779 inheritance. + + inherit = False + + ## @var canonical + # Whether this resource_set is currently in canonical form. + + canonical = False + + def __init__(self, ini = None, allow_overlap = False): + """ + Initialize a resource_set. + """ + list.__init__(self) + if isinstance(ini, (int, long)): + ini = str(ini) + if ini is inherit_token: + self.inherit = True + elif isinstance(ini, str) and len(ini): + self.extend(self.parse_str(s) for s in ini.split(",")) + elif isinstance(ini, list): + self.extend(ini) + elif ini is not None and ini != "": + raise ValueError("Unexpected initializer: %s" % str(ini)) + self.canonize(allow_overlap) + + def canonize(self, allow_overlap = False): + """ + Whack this resource_set into canonical form. 
+ """ + assert not self.inherit or len(self) == 0 + if not self.canonical: + self.sort() + i = 0 + while i + 1 < len(self): + if allow_overlap and self[i].max + 1 >= self[i+1].min: + self[i] = type(self[i])(self[i].min, max(self[i].max, self[i+1].max)) + del self[i+1] + elif self[i].max + 1 == self[i+1].min: + self[i] = type(self[i])(self[i].min, self[i+1].max) + del self[i+1] + else: + i += 1 + for i in xrange(0, len(self) - 1): + if self[i].max >= self[i+1].min: + raise rpki.exceptions.ResourceOverlap("Resource overlap: %s %s" % (self[i], self[i+1])) + self.canonical = True + + def append(self, item): + """ + Wrapper around list.append() (q.v.) to reset canonical flag. + """ + list.append(self, item) + self.canonical = False + + def extend(self, item): + """ + Wrapper around list.extend() (q.v.) to reset canonical flag. + """ + list.extend(self, item) + self.canonical = False + + def __str__(self): + """ + Convert a resource_set to string format. + """ + if self.inherit: + return inherit_token + else: + return ",".join(str(x) for x in self) + + def _comm(self, other): + """ + Like comm(1), sort of. + + Returns a tuple of three resource sets: resources only in self, + resources only in other, and resources in both. Used (not very + efficiently) as the basis for most set operations on resource + sets. + """ + + assert not self.inherit + assert type(self) is type(other), "Type mismatch %r %r" % (type(self), type(other)) + set1 = type(self)(self) # clone and whack into canonical form + set2 = type(other)(other) # ditto + only1, only2, both = [], [], [] + while set1 or set2: + if set1 and (not set2 or set1[0].max < set2[0].min): + only1.append(set1.pop(0)) + elif set2 and (not set1 or set2[0].max < set1[0].min): + only2.append(set2.pop(0)) + elif set1[0].min < set2[0].min: + _rsplit(set1, set2[0]) + elif set2[0].min < set1[0].min: + _rsplit(set2, set1[0]) + elif set1[0].max < set2[0].max: + _rsplit(set2, set1[0]) + elif set2[0].max < set1[0].max: + _rsplit(set1, set2[0]) + else: + assert set1[0].min == set2[0].min and set1[0].max == set2[0].max + both.append(set1.pop(0)) + set2.pop(0) + return type(self)(only1), type(self)(only2), type(self)(both) + + def union(self, other): + """ + Set union for resource sets. + """ + + assert not self.inherit + assert type(self) is type(other), "Type mismatch: %r %r" % (type(self), type(other)) + set1 = type(self)(self) # clone and whack into canonical form + set2 = type(other)(other) # ditto + result = [] + while set1 or set2: + if set1 and (not set2 or set1[0].max < set2[0].min): + result.append(set1.pop(0)) + elif set2 and (not set1 or set2[0].max < set1[0].min): + result.append(set2.pop(0)) + else: + this = set1.pop(0) + that = set2.pop(0) + assert type(this) is type(that) + range_min = min(this.min, that.min) + range_max = max(this.max, that.max) + result.append(type(this)(range_min, range_max)) + while set1 and set1[0].max <= range_max: + assert set1[0].min >= range_min + del set1[0] + while set2 and set2[0].max <= range_max: + assert set2[0].min >= range_min + del set2[0] + return type(self)(result) + + __or__ = union + + def intersection(self, other): + """ + Set intersection for resource sets. + """ + return self._comm(other)[2] + + __and__ = intersection + + def difference(self, other): + """ + Set difference for resource sets. + """ + return self._comm(other)[0] + + __sub__ = difference + + def symmetric_difference(self, other): + """ + Set symmetric difference (XOR) for resource sets. 
+ """ + com = self._comm(other) + return com[0] | com[1] + + __xor__ = symmetric_difference + + def contains(self, item): + """ + Set membership test for resource sets. + """ + assert not self.inherit + self.canonize() + if not self: + return False + if type(item) is type(self[0]): + range_min = item.min + range_max = item.max + else: + range_min = item + range_max = item + lo = 0 + hi = len(self) + while lo < hi: + mid = (lo + hi) / 2 + if self[mid].max < range_max: + lo = mid + 1 + else: + hi = mid + return lo < len(self) and self[lo].min <= range_min and self[lo].max >= range_max + + __contains__ = contains + + def issubset(self, other): + """ + Test whether self is a subset (possibly improper) of other. + """ + for i in self: + if not other.contains(i): + return False + return True + + __le__ = issubset + + def issuperset(self, other): + """ + Test whether self is a superset (possibly improper) of other. + """ + return other.issubset(self) + + __ge__ = issuperset + + def __lt__(self, other): + return not self.issuperset(other) + + def __gt__(self, other): + return not self.issubset(other) + + def __ne__(self, other): + """ + A set with the inherit bit set is always unequal to any other set, because + we can't know the answer here. This is also consistent with __nonzero__ + which returns True for inherit sets, and False for empty sets. + """ + return self.inherit or other.inherit or list.__ne__(self, other) + + def __eq__(self, other): + return not self.__ne__(other) + + def __nonzero__(self): + """ + Tests whether or not this set is empty. Note that sets with the inherit + bit set are considered non-empty, despite having zero length. + """ + return self.inherit or len(self) + + @classmethod + def from_sql(cls, sql, query, args = None): + """ + Create resource set from an SQL query. + + sql is an object that supports execute() and fetchall() methods + like a DB API 2.0 cursor object. + + query is an SQL query that returns a sequence of (min, max) pairs. + """ + + sql.execute(query, args) + return cls(ini = [cls.range_type(cls.range_type.datum_type(b), + cls.range_type.datum_type(e)) + for (b, e) in sql.fetchall()]) + + @classmethod + def from_django(cls, iterable): + """ + Create resource set from a Django query. + + iterable is something which returns (min, max) pairs. + """ + + return cls(ini = [cls.range_type(cls.range_type.datum_type(b), + cls.range_type.datum_type(e)) + for (b, e) in iterable]) + + @classmethod + def parse_str(cls, s): + """ + Parse resource set from text string (eg, XML attributes). This is + a backwards compatability wrapper, real functionality is now part + of the range classes. + """ + return cls.range_type.parse_str(s) + +class resource_set_as(resource_set): + """ + Autonomous System Number resource set. + """ + + ## @var range_type + # Type of range underlying this type of resource_set. + + range_type = resource_range_as + +class resource_set_ip(resource_set): + """ + (Generic) IP address resource set. + + This is a virtual class. You probably don't want to use it + directly. + """ + + def to_roa_prefix_set(self): + """ + Convert from a resource set to a ROA prefix set. + """ + prefix_ranges = [] + for r in self: + r.chop_into_prefixes(prefix_ranges) + return self.roa_prefix_set_type([ + self.roa_prefix_set_type.prefix_type(r.min, r.prefixlen()) + for r in prefix_ranges]) + +class resource_set_ipv4(resource_set_ip): + """ + IPv4 address resource set. + """ + + ## @var range_type + # Type of range underlying this type of resource_set. 
+ + range_type = resource_range_ipv4 + +class resource_set_ipv6(resource_set_ip): + """ + IPv6 address resource set. + """ + + ## @var range_type + # Type of range underlying this type of resource_set. + + range_type = resource_range_ipv6 + +class resource_bag(object): + """ + Container to simplify passing around the usual triple of ASN, IPv4, + and IPv6 resource sets. + """ + + ## @var asn + # Set of Autonomous System Number resources. + + ## @var v4 + # Set of IPv4 resources. + + ## @var v6 + # Set of IPv6 resources. + + ## @var valid_until + # Expiration date of resources, for setting certificate notAfter field. + + def __init__(self, asn = None, v4 = None, v6 = None, valid_until = None): + self.asn = asn or resource_set_as() + self.v4 = v4 or resource_set_ipv4() + self.v6 = v6 or resource_set_ipv6() + self.valid_until = valid_until + + def oversized(self, other): + """ + True iff self is oversized with respect to other. + """ + return not self.asn.issubset(other.asn) or \ + not self.v4.issubset(other.v4) or \ + not self.v6.issubset(other.v6) + + def undersized(self, other): + """ + True iff self is undersized with respect to other. + """ + return not other.asn.issubset(self.asn) or \ + not other.v4.issubset(self.v4) or \ + not other.v6.issubset(self.v6) + + @classmethod + def from_inheritance(cls): + """ + Build a resource bag that just inherits everything from its + parent. + """ + self = cls() + self.asn = resource_set_as() + self.v4 = resource_set_ipv4() + self.v6 = resource_set_ipv6() + self.asn.inherit = True + self.v4.inherit = True + self.v6.inherit = True + return self + + @classmethod + def from_str(cls, text, allow_overlap = False): + """ + Parse a comma-separated text string into a resource_bag. Not + particularly efficient, fix that if and when it becomes an issue. + """ + asns = [] + v4s = [] + v6s = [] + for word in text.split(","): + if "." in word: + v4s.append(word) + elif ":" in word: + v6s.append(word) + else: + asns.append(word) + return cls(asn = resource_set_as(",".join(asns), allow_overlap) if asns else None, + v4 = resource_set_ipv4(",".join(v4s), allow_overlap) if v4s else None, + v6 = resource_set_ipv6(",".join(v6s), allow_overlap) if v6s else None) + + @classmethod + def from_POW_rfc3779(cls, resources): + """ + Build a resource_bag from data returned by + rpki.POW.X509.getRFC3779(). + + The conversion to long for v4 and v6 is (intended to be) + temporary: in the long run, we should be using rpki.POW.IPAddress + rather than long here. + """ + asn = inherit_token if resources[0] == "inherit" else [resource_range_as( r[0], r[1]) for r in resources[0] or ()] + v4 = inherit_token if resources[1] == "inherit" else [resource_range_ipv4(r[0], r[1]) for r in resources[1] or ()] + v6 = inherit_token if resources[2] == "inherit" else [resource_range_ipv6(r[0], r[1]) for r in resources[2] or ()] + return cls(resource_set_as(asn) if asn else None, + resource_set_ipv4(v4) if v4 else None, + resource_set_ipv6(v6) if v6 else None) + + def empty(self): + """ + True iff all resource sets in this bag are empty. + """ + return not self.asn and not self.v4 and not self.v6 + + def __nonzero__(self): + return not self.empty() + + def __eq__(self, other): + return self.asn == other.asn and \ + self.v4 == other.v4 and \ + self.v6 == other.v6 and \ + self.valid_until == other.valid_until + + def __ne__(self, other): + return not (self == other) + + def intersection(self, other): + """ + Compute intersection with another resource_bag. 
valid_until
+    attribute (if any) inherits from self.
+    """
+    return self.__class__(self.asn & other.asn,
+                          self.v4 & other.v4,
+                          self.v6 & other.v6,
+                          self.valid_until)
+
+  __and__ = intersection
+
+  def union(self, other):
+    """
+    Compute union with another resource_bag. valid_until attribute
+    (if any) inherits from self.
+    """
+    return self.__class__(self.asn | other.asn,
+                          self.v4 | other.v4,
+                          self.v6 | other.v6,
+                          self.valid_until)
+
+  __or__ = union
+
+  def difference(self, other):
+    """
+    Compute difference against another resource_bag. valid_until
+    attribute (if any) inherits from self.
+    """
+    return self.__class__(self.asn - other.asn,
+                          self.v4 - other.v4,
+                          self.v6 - other.v6,
+                          self.valid_until)
+
+  __sub__ = difference
+
+  def symmetric_difference(self, other):
+    """
+    Compute symmetric difference against another resource_bag.
+    valid_until attribute (if any) inherits from self.
+    """
+    return self.__class__(self.asn ^ other.asn,
+                          self.v4 ^ other.v4,
+                          self.v6 ^ other.v6,
+                          self.valid_until)
+
+  __xor__ = symmetric_difference
+
+  def __str__(self):
+    s = ""
+    if self.asn:
+      s += "ASN: %s" % self.asn
+    if self.v4:
+      if s:
+        s += ", "
+      s += "V4: %s" % self.v4
+    if self.v6:
+      if s:
+        s += ", "
+      s += "V6: %s" % self.v6
+    return s
+
+  def __iter__(self):
+    for r in self.asn:
+      yield r
+    for r in self.v4:
+      yield r
+    for r in self.v6:
+      yield r
+
+# Sadly, there are enough differences between RFC 3779 and the data
+# structures in the latest proposed ROA format that we can't just use
+# the RFC 3779 code for ROAs. So we need a separate set of classes
+# that are similar in concept but different in detail, with conversion
+# functions. Such is life. I suppose it might be possible to do this
+# with multiple inheritance, but that's probably more bother than it's
+# worth.
+
+class roa_prefix(object):
+  """
+  ROA prefix. This is similar to the resource_range_ip class, but
+  differs in that it only represents prefixes, never ranges, and
+  includes the maximum prefix length as an additional value.
+
+  This is a virtual class; you probably don't want to use it directly.
+  """
+
+  ## @var prefix
+  # The prefix itself, an IP address with bits beyond the prefix
+  # length zeroed.
+
+  ## @var prefixlen
+  # (Minimum) prefix length.
+
+  ## @var max_prefixlen
+  # Maximum prefix length.
+
+  def __init__(self, prefix, prefixlen, max_prefixlen = None):
+    """
+    Initialize a ROA prefix. max_prefixlen is optional and defaults
+    to prefixlen. max_prefixlen must not be smaller than prefixlen.
+    """
+    if max_prefixlen is None:
+      max_prefixlen = prefixlen
+    assert max_prefixlen >= prefixlen, "Bad max_prefixlen: %d must not be shorter than %d" % (max_prefixlen, prefixlen)
+    self.prefix = prefix
+    self.prefixlen = prefixlen
+    self.max_prefixlen = max_prefixlen
+
+  def __cmp__(self, other):
+    """
+    Compare two ROA prefix objects. Comparison is based on prefix,
+    prefixlen, and max_prefixlen, in that order.
+    """
+    assert self.__class__ is other.__class__
+    return (cmp(self.prefix, other.prefix) or
+            cmp(self.prefixlen, other.prefixlen) or
+            cmp(self.max_prefixlen, other.max_prefixlen))
+
+  def __str__(self):
+    """
+    Convert a ROA prefix to string format.
+    """
+    if self.prefixlen == self.max_prefixlen:
+      return str(self.prefix) + "/" + str(self.prefixlen)
+    else:
+      return str(self.prefix) + "/" + str(self.prefixlen) + "-" + str(self.max_prefixlen)
+
+  def to_resource_range(self):
+    """
+    Convert this ROA prefix to the equivalent resource_range_ip
+    object. This is an irreversible transformation because it loses
+    the max_prefixlen attribute; nothing we can do about that.
+    """
+    return self.range_type.make_prefix(self.prefix, self.prefixlen)
+
+  def min(self):
+    """
+    Return lowest address covered by prefix.
+    """
+    return self.prefix
+
+  def max(self):
+    """
+    Return highest address covered by prefix.
+    """
+    return self.prefix | ((1 << (self.prefix.bits - self.prefixlen)) - 1)
+
+  def to_POW_roa_tuple(self):
+    """
+    Convert a resource_range_ip to rpki.POW.ROA.setPrefixes() format.
+    """
+    return self.prefix, self.prefixlen, self.max_prefixlen
+
+  @classmethod
+  def parse_str(cls, x):
+    """
+    Parse ROA prefix from text (eg, an XML attribute).
+    """
+    r = re_prefix_with_maxlen.match(x)
+    if r:
+      return cls(rpki.POW.IPAddress(r.group(1)), int(r.group(2)), int(r.group(3)))
+    r = re_prefix.match(x)
+    if r:
+      return cls(rpki.POW.IPAddress(r.group(1)), int(r.group(2)))
+    raise rpki.exceptions.BadROAPrefix, 'Bad ROA prefix "%s"' % (x)
+
+class roa_prefix_ipv4(roa_prefix):
+  """
+  IPv4 ROA prefix.
+  """
+
+  ## @var range_type
+  # Type of corresponding resource_range_ip.
+
+  range_type = resource_range_ipv4
+
+class roa_prefix_ipv6(roa_prefix):
+  """
+  IPv6 ROA prefix.
+  """
+
+  ## @var range_type
+  # Type of corresponding resource_range_ip.
+
+  range_type = resource_range_ipv6
+
+class roa_prefix_set(list):
+  """
+  Set of ROA prefixes, analogous to the resource_set_ip class.
+  """
+
+  def __init__(self, ini = None):
+    """
+    Initialize a ROA prefix set.
+    """
+    list.__init__(self)
+    if isinstance(ini, str) and len(ini):
+      self.extend(self.parse_str(s) for s in ini.split(","))
+    elif isinstance(ini, (list, tuple)):
+      self.extend(ini)
+    else:
+      assert ini is None or ini == "", "Unexpected initializer: %s" % str(ini)
+    self.sort()
+
+  def __str__(self):
+    """
+    Convert a ROA prefix set to string format.
+    """
+    return ",".join(str(x) for x in self)
+
+  @classmethod
+  def parse_str(cls, s):
+    """
+    Parse ROA prefix from text (eg, an XML attribute).
+    This method is a backwards compatibility shim.
+    """
+    return cls.prefix_type.parse_str(s)
+
+  def to_resource_set(self):
+    """
+    Convert a ROA prefix set to a resource set. This is an
+    irreversible transformation. We have to compute a union here
+    because ROA prefix sets can include overlaps, while RFC 3779
+    resource sets cannot. This is ugly, and there is almost certainly
+    a more efficient way to do this, but start by getting the output
+    right before worrying about making it fast or pretty.
+    """
+    r = self.resource_set_type()
+    s = self.resource_set_type()
+    s.append(None)
+    for p in self:
+      s[0] = p.to_resource_range()
+      r |= s
+    return r
+
+  @classmethod
+  def from_sql(cls, sql, query, args = None):
+    """
+    Create ROA prefix set from an SQL query.
+
+    sql is an object that supports execute() and fetchall() methods
+    like a DB API 2.0 cursor object.
+
+    query is an SQL query that returns a sequence of (prefix,
+    prefixlen, max_prefixlen) triples.
+    """
+
+    sql.execute(query, args)
+    return cls([cls.prefix_type(rpki.POW.IPAddress(x), int(y), int(z))
+                for (x, y, z) in sql.fetchall()])
+
+  @classmethod
+  def from_django(cls, iterable):
+    """
+    Create ROA prefix set from a Django query.
+
+    iterable is something which returns (prefix, prefixlen,
+    max_prefixlen) triples.
+    """
+
+    return cls([cls.prefix_type(rpki.POW.IPAddress(x), int(y), int(z))
+                for (x, y, z) in iterable])
+
+  def to_POW_roa_tuple(self):
+    """
+    Convert ROA prefix set to form used by rpki.POW.ROA.setPrefixes().
+ """ + if self: + return tuple(a.to_POW_roa_tuple() for a in self) + else: + return None + + +class roa_prefix_set_ipv4(roa_prefix_set): + """ + Set of IPv4 ROA prefixes. + """ + + ## @var prefix_type + # Type of underlying roa_prefix. + + prefix_type = roa_prefix_ipv4 + + ## @var resource_set_type + # Type of corresponding resource_set_ip class. + + resource_set_type = resource_set_ipv4 + +# Fix back link from resource_set to roa_prefix +resource_set_ipv4.roa_prefix_set_type = roa_prefix_set_ipv4 + +class roa_prefix_set_ipv6(roa_prefix_set): + """ + Set of IPv6 ROA prefixes. + """ + + ## @var prefix_type + # Type of underlying roa_prefix. + + prefix_type = roa_prefix_ipv6 + + ## @var resource_set_type + # Type of corresponding resource_set_ip class. + + resource_set_type = resource_set_ipv6 + +# Fix back link from resource_set to roa_prefix +resource_set_ipv6.roa_prefix_set_type = roa_prefix_set_ipv6 + +class roa_prefix_bag(object): + """ + Container to simplify passing around the combination of an IPv4 ROA + prefix set and an IPv6 ROA prefix set. + """ + + ## @var v4 + # Set of IPv4 prefixes. + + ## @var v6 + # Set of IPv6 prefixes. + + def __init__(self, v4 = None, v6 = None): + self.v4 = v4 or roa_prefix_set_ipv4() + self.v6 = v6 or roa_prefix_set_ipv6() + + def __eq__(self, other): + return self.v4 == other.v4 and self.v6 == other.v6 + + def __ne__(self, other): + return not (self == other) + + +# Test suite for set operations. + +if __name__ == "__main__": + + def testprefix(v): + return " (%s)" % v.to_roa_prefix_set() if isinstance(v, resource_set_ip) else "" + + def test1(t, s1, s2): + if isinstance(s1, str) and isinstance(s2, str): + print "x: ", s1 + print "y: ", s2 + r1 = t(s1) + r2 = t(s2) + print "x: ", r1, testprefix(r1) + print "y: ", r2, testprefix(r2) + v1 = r1._comm(r2) + v2 = r2._comm(r1) + assert v1[0] == v2[1] and v1[1] == v2[0] and v1[2] == v2[2] + for i in r1: assert i in r1 and i.min in r1 and i.max in r1 + for i in r2: assert i in r2 and i.min in r2 and i.max in r2 + for i in v1[0]: assert i in r1 and i not in r2 + for i in v1[1]: assert i not in r1 and i in r2 + for i in v1[2]: assert i in r1 and i in r2 + v1 = r1 | r2 + v2 = r2 | r1 + assert v1 == v2 + print "x|y:", v1, testprefix(v1) + v1 = r1 - r2 + v2 = r2 - r1 + print "x-y:", v1, testprefix(v1) + print "y-x:", v2, testprefix(v2) + v1 = r1 ^ r2 + v2 = r2 ^ r1 + assert v1 == v2 + print "x^y:", v1, testprefix(v1) + v1 = r1 & r2 + v2 = r2 & r1 + assert v1 == v2 + print "x&y:", v1, testprefix(v1) + + def test2(t, s1, s2): + print "x: ", s1 + print "y: ", s2 + r1 = t(s1) + r2 = t(s2) + print "x: ", r1 + print "y: ", r2 + print "x>y:", (r1 > r2) + print "x<y:", (r1 < r2) + test1(t.resource_set_type, + r1.to_resource_set(), + r2.to_resource_set()) + + def test3(t, s1, s2): + test1(t, s1, s2) + r1 = t(s1).to_roa_prefix_set() + r2 = t(s2).to_roa_prefix_set() + print "x: ", r1 + print "y: ", r2 + print "x>y:", (r1 > r2) + print "x<y:", (r1 < r2) + test1(t.roa_prefix_set_type.resource_set_type, + r1.to_resource_set(), + r2.to_resource_set()) + + print + print "Testing set operations on resource sets" + print + test1(resource_set_as, "1,2,3,4,5,6,11,12,13,14,15", "1,2,3,4,5,6,111,121,131,141,151") + print + test1(resource_set_ipv4, "10.0.0.44/32,10.6.0.2/32", "10.3.0.0/24,10.0.0.77/32") + print + test1(resource_set_ipv4, "10.0.0.44/32,10.6.0.2/32", "10.0.0.0/24") + print + test1(resource_set_ipv4, "10.0.0.0/24", "10.3.0.0/24,10.0.0.77/32") + print + test1(resource_set_ipv4, "10.0.0.0/24", 
"10.0.0.0/32,10.0.0.2/32,10.0.0.4/32") + print + print "Testing set operations on ROA prefixes" + print + test2(roa_prefix_set_ipv4, "10.0.0.44/32,10.6.0.2/32", "10.3.0.0/24,10.0.0.77/32") + print + test2(roa_prefix_set_ipv4, "10.0.0.0/24-32,10.6.0.0/24-32", "10.3.0.0/24,10.0.0.0/16-32") + print + test2(roa_prefix_set_ipv4, "10.3.0.0/24-24,10.0.0.0/16-32", "10.3.0.0/24,10.0.0.0/16-32") + print + test2(roa_prefix_set_ipv6, "2002:0a00:002c::1/128", "2002:0a00:002c::2/128") + print + test2(roa_prefix_set_ipv6, "2002:0a00:002c::1/128", "2002:0a00:002c::7/128") + print + test2(roa_prefix_set_ipv6, "2002:0a00:002c::1/128", "2002:0a00:002c::/120") + print + test2(roa_prefix_set_ipv6, "2002:0a00:002c::1/128", "2002:0a00:002c::/120-128") + print + test3(resource_set_ipv4, "10.0.0.44/32,10.6.0.2/32", "10.3.0.0/24,10.0.0.77/32") + print + test3(resource_set_ipv6, "2002:0a00:002c::1/128", "2002:0a00:002c::2/128") + print + test3(resource_set_ipv6, "2002:0a00:002c::1/128", "2002:0a00:002c::/120") diff --git a/rpki/rootd.py b/rpki/rootd.py new file mode 100644 index 00000000..43e84873 --- /dev/null +++ b/rpki/rootd.py @@ -0,0 +1,385 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Trivial RPKI up-down protocol root server. Not recommended for +production use. Overrides a bunch of method definitions from the +rpki.* classes in order to reuse as much code as possible. 
+""" + +import os +import time +import argparse +import sys +import rpki.resource_set +import rpki.up_down +import rpki.left_right +import rpki.x509 +import rpki.http +import rpki.config +import rpki.exceptions +import rpki.relaxng +import rpki.sundial +import rpki.log +import rpki.daemonize + +rootd = None + +class list_pdu(rpki.up_down.list_pdu): + def serve_pdu(self, q_msg, r_msg, ignored, callback, errback): + r_msg.payload = rpki.up_down.list_response_pdu() + rootd.compose_response(r_msg) + callback() + +class issue_pdu(rpki.up_down.issue_pdu): + def serve_pdu(self, q_msg, r_msg, ignored, callback, errback): + self.pkcs10.check_valid_request_ca() + r_msg.payload = rpki.up_down.issue_response_pdu() + rootd.compose_response(r_msg, self.pkcs10) + callback() + +class revoke_pdu(rpki.up_down.revoke_pdu): + def serve_pdu(self, q_msg, r_msg, ignored, callback, errback): + rpki.log.debug("Revocation requested for SKI %s" % self.ski) + subject_cert = rootd.get_subject_cert() + if subject_cert is None: + rpki.log.debug("No subject certificate, nothing to revoke") + raise rpki.exceptions.NotInDatabase + if subject_cert.gSKI() != self.ski: + rpki.log.debug("Subject certificate has different SKI %s, not revoking" % subject_cert.gSKI()) + raise rpki.exceptions.NotInDatabase + rpki.log.debug("Revoking certificate %s" % self.ski) + now = rpki.sundial.now() + rootd.revoke_subject_cert(now) + rootd.del_subject_cert() + rootd.del_subject_pkcs10() + rootd.generate_crl_and_manifest(now) + r_msg.payload = rpki.up_down.revoke_response_pdu() + r_msg.payload.class_name = self.class_name + r_msg.payload.ski = self.ski + callback() + +class error_response_pdu(rpki.up_down.error_response_pdu): + exceptions = rpki.up_down.error_response_pdu.exceptions.copy() + exceptions[rpki.exceptions.ClassNameUnknown, revoke_pdu] = 1301 + exceptions[rpki.exceptions.NotInDatabase, revoke_pdu] = 1302 + +class message_pdu(rpki.up_down.message_pdu): + + name2type = { + "list" : list_pdu, + "list_response" : rpki.up_down.list_response_pdu, + "issue" : issue_pdu, + "issue_response" : rpki.up_down.issue_response_pdu, + "revoke" : revoke_pdu, + "revoke_response" : rpki.up_down.revoke_response_pdu, + "error_response" : error_response_pdu } + + type2name = dict((v, k) for k, v in name2type.items()) + + error_pdu_type = error_response_pdu + + def log_query(self, child): + """ + Log query we're handling. 
+ """ + rpki.log.info("Serving %s query" % self.type) + +class sax_handler(rpki.up_down.sax_handler): + pdu = message_pdu + +class cms_msg(rpki.up_down.cms_msg): + saxify = sax_handler.saxify + +class main(object): + + def get_root_cert(self): + rpki.log.debug("Read root cert %s" % self.rpki_root_cert_file) + self.rpki_root_cert = rpki.x509.X509(Auto_file = self.rpki_root_cert_file) + + def root_newer_than_subject(self): + return os.stat(self.rpki_root_cert_file).st_mtime > \ + os.stat(os.path.join(self.rpki_root_dir, self.rpki_subject_cert)).st_mtime + + def get_subject_cert(self): + filename = os.path.join(self.rpki_root_dir, self.rpki_subject_cert) + try: + x = rpki.x509.X509(Auto_file = filename) + rpki.log.debug("Read subject cert %s" % filename) + return x + except IOError: + return None + + def set_subject_cert(self, cert): + filename = os.path.join(self.rpki_root_dir, self.rpki_subject_cert) + rpki.log.debug("Writing subject cert %s, SKI %s" % (filename, cert.hSKI())) + f = open(filename, "wb") + f.write(cert.get_DER()) + f.close() + + def del_subject_cert(self): + filename = os.path.join(self.rpki_root_dir, self.rpki_subject_cert) + rpki.log.debug("Deleting subject cert %s" % filename) + os.remove(filename) + + def get_subject_pkcs10(self): + try: + x = rpki.x509.PKCS10(Auto_file = self.rpki_subject_pkcs10) + rpki.log.debug("Read subject PKCS #10 %s" % self.rpki_subject_pkcs10) + return x + except IOError: + return None + + def set_subject_pkcs10(self, pkcs10): + rpki.log.debug("Writing subject PKCS #10 %s" % self.rpki_subject_pkcs10) + f = open(self.rpki_subject_pkcs10, "wb") + f.write(pkcs10.get_DER()) + f.close() + + def del_subject_pkcs10(self): + rpki.log.debug("Deleting subject PKCS #10 %s" % self.rpki_subject_pkcs10) + try: + os.remove(self.rpki_subject_pkcs10) + except OSError: + pass + + def issue_subject_cert_maybe(self, new_pkcs10): + now = rpki.sundial.now() + subject_cert = self.get_subject_cert() + old_pkcs10 = self.get_subject_pkcs10() + if new_pkcs10 is not None and new_pkcs10 != old_pkcs10: + self.set_subject_pkcs10(new_pkcs10) + if subject_cert is not None: + rpki.log.debug("PKCS #10 changed, regenerating subject certificate") + self.revoke_subject_cert(now) + subject_cert = None + if subject_cert is not None and subject_cert.getNotAfter() <= now + self.rpki_subject_regen: + rpki.log.debug("Subject certificate has reached expiration threshold, regenerating") + self.revoke_subject_cert(now) + subject_cert = None + if subject_cert is not None and self.root_newer_than_subject(): + rpki.log.debug("Root certificate has changed, regenerating subject") + self.revoke_subject_cert(now) + subject_cert = None + self.get_root_cert() + if subject_cert is not None: + return subject_cert + pkcs10 = old_pkcs10 if new_pkcs10 is None else new_pkcs10 + if pkcs10 is None: + rpki.log.debug("No PKCS #10 request, can't generate subject certificate yet") + return None + resources = self.rpki_root_cert.get_3779resources() + notAfter = now + self.rpki_subject_lifetime + rpki.log.info("Generating subject cert %s with resources %s, expires %s" % ( + self.rpki_base_uri + self.rpki_subject_cert, resources, notAfter)) + req_key = pkcs10.getPublicKey() + req_sia = pkcs10.get_SIA() + self.next_serial_number() + subject_cert = self.rpki_root_cert.issue( + keypair = self.rpki_root_key, + subject_key = req_key, + serial = self.serial_number, + sia = req_sia, + aia = self.rpki_root_cert_uri, + crldp = self.rpki_base_uri + self.rpki_root_crl, + resources = resources, + notBefore = now, + notAfter = 
notAfter) + self.set_subject_cert(subject_cert) + self.generate_crl_and_manifest(now) + return subject_cert + + def generate_crl_and_manifest(self, now): + subject_cert = self.get_subject_cert() + self.next_serial_number() + self.next_crl_number() + while self.revoked and self.revoked[0][1] + 2 * self.rpki_subject_regen < now: + del self.revoked[0] + crl = rpki.x509.CRL.generate( + keypair = self.rpki_root_key, + issuer = self.rpki_root_cert, + serial = self.crl_number, + thisUpdate = now, + nextUpdate = now + self.rpki_subject_regen, + revokedCertificates = self.revoked) + rpki.log.debug("Writing CRL %s" % os.path.join(self.rpki_root_dir, self.rpki_root_crl)) + f = open(os.path.join(self.rpki_root_dir, self.rpki_root_crl), "wb") + f.write(crl.get_DER()) + f.close() + manifest_content = [(self.rpki_root_crl, crl)] + if subject_cert is not None: + manifest_content.append((self.rpki_subject_cert, subject_cert)) + manifest_resources = rpki.resource_set.resource_bag.from_inheritance() + manifest_keypair = rpki.x509.RSA.generate() + manifest_cert = self.rpki_root_cert.issue( + keypair = self.rpki_root_key, + subject_key = manifest_keypair.get_public(), + serial = self.serial_number, + sia = (None, None, self.rpki_base_uri + self.rpki_root_manifest), + aia = self.rpki_root_cert_uri, + crldp = self.rpki_base_uri + self.rpki_root_crl, + resources = manifest_resources, + notBefore = now, + notAfter = now + self.rpki_subject_lifetime, + is_ca = False) + manifest = rpki.x509.SignedManifest.build( + serial = self.crl_number, + thisUpdate = now, + nextUpdate = now + self.rpki_subject_regen, + names_and_objs = manifest_content, + keypair = manifest_keypair, + certs = manifest_cert) + rpki.log.debug("Writing manifest %s" % os.path.join(self.rpki_root_dir, self.rpki_root_manifest)) + f = open(os.path.join(self.rpki_root_dir, self.rpki_root_manifest), "wb") + f.write(manifest.get_DER()) + f.close() + + def revoke_subject_cert(self, now): + self.revoked.append((self.get_subject_cert().getSerial(), now)) + + def compose_response(self, r_msg, pkcs10 = None): + subject_cert = self.issue_subject_cert_maybe(pkcs10) + rc = rpki.up_down.class_elt() + rc.class_name = self.rpki_class_name + rc.cert_url = rpki.up_down.multi_uri(self.rpki_root_cert_uri) + rc.from_resource_bag(self.rpki_root_cert.get_3779resources()) + rc.issuer = self.rpki_root_cert + r_msg.payload.classes.append(rc) + if subject_cert is not None: + rc.certs.append(rpki.up_down.certificate_elt()) + rc.certs[0].cert_url = rpki.up_down.multi_uri(self.rpki_base_uri + self.rpki_subject_cert) + rc.certs[0].cert = subject_cert + + def up_down_handler(self, query, path, cb): + try: + q_cms = cms_msg(DER = query) + q_msg = q_cms.unwrap((self.bpki_ta, self.child_bpki_cert)) + self.cms_timestamp = q_cms.check_replay(self.cms_timestamp, path) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + return cb(400, reason = "Could not process PDU: %s" % e) + + def done(r_msg): + cb(200, body = cms_msg().wrap( + r_msg, self.rootd_bpki_key, self.rootd_bpki_cert, + self.rootd_bpki_crl if self.include_bpki_crl else None)) + + try: + q_msg.serve_top_level(None, done) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + try: + done(q_msg.serve_error(e)) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + cb(500, reason = "Could not process PDU: %s" % e) + + + def next_crl_number(self): + if self.crl_number is None: + try: + crl = 
rpki.x509.CRL(DER_file = os.path.join(self.rpki_root_dir, self.rpki_root_crl)) + self.crl_number = crl.getCRLNumber() + except: # pylint: disable=W0702 + self.crl_number = 0 + self.crl_number += 1 + return self.crl_number + + + def next_serial_number(self): + if self.serial_number is None: + subject_cert = self.get_subject_cert() + if subject_cert is not None: + self.serial_number = subject_cert.getSerial() + 1 + else: + self.serial_number = 0 + self.serial_number += 1 + return self.serial_number + + + def __init__(self): + + global rootd + rootd = self # Gross, but simpler than what we'd have to do otherwise + + self.rpki_root_cert = None + self.serial_number = None + self.crl_number = None + self.revoked = [] + self.cms_timestamp = None + + os.environ["TZ"] = "UTC" + time.tzset() + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "override default location of configuration file") + parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging mode") + parser.add_argument("-f", "--foreground", action = "store_true", + help = "do not daemonize") + parser.add_argument("--pidfile", + help = "override default location of pid file") + args = parser.parse_args() + + rpki.log.init("rootd", use_syslog = not args.debug) + + self.cfg = rpki.config.parser(args.config, "rootd") + self.cfg.set_global_flags() + + if not args.foreground and not args.debug: + rpki.daemonize.daemon(pidfile = args.pidfile) + + self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta")) + self.rootd_bpki_key = rpki.x509.RSA( Auto_update = self.cfg.get("rootd-bpki-key")) + self.rootd_bpki_cert = rpki.x509.X509(Auto_update = self.cfg.get("rootd-bpki-cert")) + self.rootd_bpki_crl = rpki.x509.CRL( Auto_update = self.cfg.get("rootd-bpki-crl")) + self.child_bpki_cert = rpki.x509.X509(Auto_update = self.cfg.get("child-bpki-cert")) + + self.http_server_host = self.cfg.get("server-host", "") + self.http_server_port = self.cfg.getint("server-port") + + self.rpki_class_name = self.cfg.get("rpki-class-name", "wombat") + + self.rpki_root_dir = self.cfg.get("rpki-root-dir") + self.rpki_base_uri = self.cfg.get("rpki-base-uri", "rsync://" + self.rpki_class_name + ".invalid/") + + self.rpki_root_key = rpki.x509.RSA(Auto_update = self.cfg.get("rpki-root-key")) + self.rpki_root_cert_file = self.cfg.get("rpki-root-cert") + self.rpki_root_cert_uri = self.cfg.get("rpki-root-cert-uri", self.rpki_base_uri + "root.cer") + + self.rpki_root_manifest = self.cfg.get("rpki-root-manifest", "root.mft") + self.rpki_root_crl = self.cfg.get("rpki-root-crl", "root.crl") + self.rpki_subject_cert = self.cfg.get("rpki-subject-cert", "child.cer") + self.rpki_subject_pkcs10 = self.cfg.get("rpki-subject-pkcs10", "child.pkcs10") + + self.rpki_subject_lifetime = rpki.sundial.timedelta.parse(self.cfg.get("rpki-subject-lifetime", "8w")) + self.rpki_subject_regen = rpki.sundial.timedelta.parse(self.cfg.get("rpki-subject-regen", self.rpki_subject_lifetime.convert_to_seconds() / 2)) + + self.include_bpki_crl = self.cfg.getboolean("include-bpki-crl", False) + + rpki.http.server(host = self.http_server_host, + port = self.http_server_port, + handlers = self.up_down_handler) diff --git a/rpki/rpkic.py b/rpki/rpkic.py new file mode 100644 index 00000000..d5339f5b --- /dev/null +++ b/rpki/rpkic.py @@ -0,0 +1,877 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, 
and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notices and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL
+# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR
+# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
+# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA
+# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Command line configuration and control tool for rpkid et al.
+
+Type "help" at the internal prompt, or run the program with the --help option for
+an overview of the available commands; type "help foo" for (more) detailed help
+on the "foo" command.
+"""
+
+# NB: As of this writing, I'm trying really hard to avoid having this
+# program depend on a Django settings.py file. This may prove to be a
+# waste of time in the long run, but for now, this means that one
+# has to be careful about exactly how and when one imports Django
+# modules, or anything that imports Django modules. Bottom line is
+# that we don't import such modules until we need them.
+
+import os
+import argparse
+import sys
+import time
+import rpki.config
+import rpki.sundial
+import rpki.log
+import rpki.http
+import rpki.resource_set
+import rpki.relaxng
+import rpki.exceptions
+import rpki.left_right
+import rpki.x509
+import rpki.async
+import rpki.version
+
+from rpki.cli import Cmd, BadCommandSyntax, parsecmd, cmdarg
+
+class BadPrefixSyntax(Exception): "Bad prefix syntax."
+class CouldntTalkToDaemon(Exception): "Couldn't talk to daemon."
+class BadXMLMessage(Exception): "Bad XML message."
+class PastExpiration(Exception): "Expiration date has already passed."
+class CantRunRootd(Exception): "Can't run rootd."
+
+module_doc = __doc__
+
+class main(Cmd):
+
+  prompt = "rpkic> "
+
+  completedefault = Cmd.filename_complete
+
+  # Top-level argparser, for stuff that one might want when starting
+  # up the interactive command loop. Not sure -i belongs here, but
+  # it's harmless so leave it here for the moment.
+
+  top_argparser = argparse.ArgumentParser(add_help = False)
+  top_argparser.add_argument("-c", "--config",
+                             help = "override default location of configuration file")
+  top_argparser.add_argument("-i", "--identity", "--handle",
+                             help = "set initial entity handle")
+  top_argparser.add_argument("--profile",
+                             help = "enable profiling, saving data to PROFILE")
+
+  # Argparser for non-interactive commands (no command loop).
+
+  full_argparser = argparse.ArgumentParser(parents = [top_argparser],
+                                           description = module_doc)
+  argsubparsers = full_argparser.add_subparsers(title = "Commands", metavar = "")
+
+  def __init__(self):
+    os.environ["TZ"] = "UTC"
+    time.tzset()
+
+    # Try parsing just the arguments that make sense if we're
+    # going to be running an interactive command loop. If that
+    # parses everything, we're interactive; otherwise it's either
+    # a non-interactive command or a parse error, so we let the full
+    # parser sort that out for us.
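+    #
+    # Illustrative sketch (hypothetical toy parser, not the ones defined
+    # above): unlike parse_args(), parse_known_args() hands unrecognized
+    # arguments back to the caller instead of treating them as an error:
+    #
+    #   toy = argparse.ArgumentParser(add_help = False)
+    #   toy.add_argument("-c", "--config")
+    #   toy.parse_known_args(["-c", "rpki.conf"])
+    #   # -> (Namespace(config = "rpki.conf"), [])
+    #   toy.parse_known_args(["-c", "rpki.conf", "initialize"])
+    #   # -> (Namespace(config = "rpki.conf"), ["initialize"])
+    #
+    # So an empty leftover list below means the command line held nothing
+    # but top-level options and we should enter the interactive command
+    # loop; anything left over is a subcommand for the full parser.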
+ + args, argv = self.top_argparser.parse_known_args() + self.interactive = not argv + if not self.interactive: + args = self.full_argparser.parse_args() + + self.cfg_file = args.config + self.handle = args.identity + + if args.profile: + import cProfile + prof = cProfile.Profile() + try: + prof.runcall(self.main, args) + finally: + prof.dump_stats(args.profile) + print "Dumped profile data to %s" % args.profile + else: + self.main(args) + + def main(self, args): + rpki.log.init("rpkic", use_syslog = False) + self.read_config() + if self.interactive: + Cmd.__init__(self) + else: + args.func(self, args) + + def read_config(self): + global rpki # pylint: disable=W0602 + + try: + cfg = rpki.config.parser(self.cfg_file, "myrpki") + cfg.set_global_flags() + except IOError, e: + sys.exit("%s: %s" % (e.strerror, e.filename)) + + self.histfile = cfg.get("history_file", os.path.expanduser("~/.rpkic_history")) + self.autosync = cfg.getboolean("autosync", True, section = "rpkic") + + from django.conf import settings + + settings.configure( + DATABASES = { "default" : { + "ENGINE" : "django.db.backends.mysql", + "NAME" : cfg.get("sql-database", section = "irdbd"), + "USER" : cfg.get("sql-username", section = "irdbd"), + "PASSWORD" : cfg.get("sql-password", section = "irdbd"), + "HOST" : "", + "PORT" : "", + "OPTIONS" : { "init_command": "SET storage_engine=INNODB" }}}, + INSTALLED_APPS = ("rpki.irdb",), + ) + + import rpki.irdb # pylint: disable=W0621 + + try: + rpki.irdb.models.ca_certificate_lifetime = rpki.sundial.timedelta.parse( + cfg.get("bpki_ca_certificate_lifetime", section = "rpkic")) + except rpki.config.ConfigParser.Error: + pass + + try: + rpki.irdb.models.ee_certificate_lifetime = rpki.sundial.timedelta.parse( + cfg.get("bpki_ee_certificate_lifetime", section = "rpkic")) + except rpki.config.ConfigParser.Error: + pass + + try: + rpki.irdb.models.crl_interval = rpki.sundial.timedelta.parse( + cfg.get("bpki_crl_interval", section = "rpkic")) + except rpki.config.ConfigParser.Error: + pass + + import django.core.management + django.core.management.call_command("syncdb", verbosity = 0, load_initial_data = False) + + self.zoo = rpki.irdb.Zookeeper(cfg = cfg, handle = self.handle, logstream = sys.stdout) + + + def do_help(self, arg): + """ + List available commands with "help" or detailed help with "help cmd". + """ + + argv = arg.split() + + if not argv: + #return self.full_argparser.print_help() + return self.print_topics( + self.doc_header, + sorted(set(name[3:] for name in self.get_names() + if name.startswith("do_") + and getattr(self, name).__doc__)), + 15, 80) + + try: + return getattr(self, "help_" + argv[0])() + except AttributeError: + pass + + func = getattr(self, "do_" + argv[0], None) + + try: + return func.argparser.print_help() + except AttributeError: + pass + + try: + return self.stdout.write(func.__doc__ + "\n") + except AttributeError: + pass + + self.stdout.write((self.nohelp + "\n") % arg) + + + def irdb_handle_complete(self, manager, text, line, begidx, endidx): + return [obj.handle for obj in manager.all() if obj.handle and obj.handle.startswith(text)] + + + @parsecmd(argsubparsers, + cmdarg("handle", help = "new handle")) + def do_select_identity(self, args): + """ + Select an identity handle for use with later commands. 
+ """ + + self.zoo.reset_identity(args.handle) + + def complete_select_identity(self, *args): + return self.irdb_handle_complete(rpki.irdb.ResourceHolderCA.objects, *args) + + + @parsecmd(argsubparsers) + def do_initialize(self, args): + """ + Initialize an RPKI installation. DEPRECATED. + + This command reads the configuration file, creates the BPKI and + EntityDB directories, generates the initial BPKI certificates, and + creates an XML file describing the resource-holding aspect of this + RPKI installation. + """ + + rootd_case = self.zoo.run_rootd and self.zoo.handle == self.zoo.cfg.get("handle") + + r = self.zoo.initialize() + r.save("%s.identity.xml" % self.zoo.handle, + None if rootd_case else sys.stdout) + + if rootd_case: + r = self.zoo.configure_rootd() + if r is not None: + r.save("%s.%s.repository-request.xml" % (self.zoo.handle, self.zoo.handle), sys.stdout) + + self.zoo.write_bpki_files() + + + @parsecmd(argsubparsers, + cmdarg("handle", help = "handle of entity to create")) + def do_create_identity(self, args): + """ + Create a new resource-holding entity. + + Returns XML file describing the new resource holder. + + This command is idempotent: calling it for a resource holder which + already exists returns the existing identity. + """ + + self.zoo.reset_identity(args.handle) + + r = self.zoo.initialize_resource_bpki() + r.save("%s.identity.xml" % self.zoo.handle, sys.stdout) + + + @parsecmd(argsubparsers) + def do_initialize_server_bpki(self, args): + """ + Initialize server BPKI portion of an RPKI installation. + + Reads server configuration from configuration file and creates the + server BPKI objects needed to start daemons. + """ + + self.zoo.initialize_server_bpki() + self.zoo.write_bpki_files() + + + @parsecmd(argsubparsers) + def do_update_bpki(self, args): + """ + Update BPKI certificates. Assumes an existing RPKI installation. + + Basic plan here is to reissue all BPKI certificates we can, right + now. In the long run we might want to be more clever about only + touching ones that need maintenance, but this will do for a start. + + We also reissue CRLs for all CAs. + + Most likely this should be run under cron. + """ + + self.zoo.update_bpki() + self.zoo.write_bpki_files() + try: + self.zoo.synchronize_bpki() + except Exception, e: + print "Couldn't push updated BPKI material into daemons: %s" % e + + + @parsecmd(argsubparsers, + cmdarg("--child_handle", help = "override default handle for new child"), + cmdarg("--valid_until", help = "override default validity interval"), + cmdarg("child_xml", help = "XML file containing child's identity")) + def do_configure_child(self, args): + """ + Configure a new child of this RPKI entity. + + This command extracts the child's data from an XML input file, + cross-certifies the child's resource-holding BPKI certificate, and + generates an XML output file describing the relationship between + the child and this parent, including this parent's BPKI data and + up-down protocol service URI. + """ + + r, child_handle = self.zoo.configure_child(args.child_xml, args.child_handle, args.valid_until) + r.save("%s.%s.parent-response.xml" % (self.zoo.handle, child_handle), sys.stdout) + self.zoo.synchronize_ca() + + + @parsecmd(argsubparsers, + cmdarg("child_handle", help = "handle of child to delete")) + def do_delete_child(self, args): + """ + Delete a child of this RPKI entity. 
+ """ + + try: + self.zoo.delete_child(args.child_handle) + self.zoo.synchronize_ca() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.Child.DoesNotExist: + print "No such child \"%s\"" % args.child_handle + + def complete_delete_child(self, *args): + return self.irdb_handle_complete(self.zoo.resource_ca.children, *args) + + + @parsecmd(argsubparsers, + cmdarg("--parent_handle", help = "override default handle for new parent"), + cmdarg("parent_xml", help = "XML file containing parent's response")) + def do_configure_parent(self, args): + """ + Configure a new parent of this RPKI entity. + + This command reads the parent's response XML, extracts the + parent's BPKI and service URI information, cross-certifies the + parent's BPKI data into this entity's BPKI, and checks for offers + or referrals of publication service. If a publication offer or + referral is present, we generate a request-for-service message to + that repository, in case the user wants to avail herself of the + referral or offer. + + We do NOT attempt automatic synchronization with rpkid at the + completion of this command, because synchronization at this point + will usually fail due to the repository not being set up yet. If + you know what you are doing and for some reason really want to + synchronize here, run the synchronize command yourself. + """ + + r, parent_handle = self.zoo.configure_parent(args.parent_xml, args.parent_handle) + r.save("%s.%s.repository-request.xml" % (self.zoo.handle, parent_handle), sys.stdout) + + + @parsecmd(argsubparsers, + cmdarg("parent_handle", help = "handle of parent to delete")) + def do_delete_parent(self, args): + """ + Delete a parent of this RPKI entity. + """ + + try: + self.zoo.delete_parent(args.parent_handle) + self.zoo.synchronize_ca() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.Parent.DoesNotExist: + print "No such parent \"%s\"" % args.parent_handle + + def complete_delete_parent(self, *args): + return self.irdb_handle_complete(self.zoo.resource_ca.parents, *args) + + + @parsecmd(argsubparsers) + def do_configure_root(self, args): + """ + Configure the current resource holding identity as a root. + + This configures rpkid to talk to rootd as (one of) its parent(s). + Returns repository request XML file like configure_parent does. + """ + + r = self.zoo.configure_rootd() + if r is not None: + r.save("%s.%s.repository-request.xml" % (self.zoo.handle, self.zoo.handle), sys.stdout) + self.zoo.write_bpki_files() + + + @parsecmd(argsubparsers) + def do_delete_root(self, args): + """ + Delete local RPKI root as parent of the current entity. + + This tells the current rpkid identity (<self/>) to stop talking to + rootd. + """ + + try: + self.zoo.delete_rootd() + self.zoo.synchronize_ca() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.Rootd.DoesNotExist: + print "No associated rootd" + + + @parsecmd(argsubparsers, + cmdarg("--flat", help = "use flat publication scheme", action = "store_true"), + cmdarg("--sia_base", help = "override SIA base value"), + cmdarg("client_xml", help = "XML file containing client request")) + def do_configure_publication_client(self, args): + """ + Configure publication server to know about a new client. 
+ + This command reads the client's request for service, + cross-certifies the client's BPKI data, and generates a response + message containing the repository's BPKI data and service URI. + """ + + r, client_handle = self.zoo.configure_publication_client(args.client_xml, args.sia_base, args.flat) + r.save("%s.repository-response.xml" % client_handle.replace("/", "."), sys.stdout) + try: + self.zoo.synchronize_pubd() + except rpki.irdb.Repository.DoesNotExist: + pass + + + @parsecmd(argsubparsers, + cmdarg("client_handle", help = "handle of client to delete")) + def do_delete_publication_client(self, args): + """ + Delete a publication client of this RPKI entity. + """ + + try: + self.zoo.delete_publication_client(args.client_handle) + self.zoo.synchronize_pubd() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.Client.DoesNotExist: + print "No such client \"%s\"" % args.client_handle + + def complete_delete_publication_client(self, *args): + return self.irdb_handle_complete(self.zoo.server_ca.clients, *args) + + + @parsecmd(argsubparsers, + cmdarg("--parent_handle", help = "override default parent handle"), + cmdarg("repository_xml", help = "XML file containing repository response")) + def do_configure_repository(self, args): + """ + Configure a publication repository for this RPKI entity. + + This command reads the repository's response to this entity's + request for publication service, extracts and cross-certifies the + BPKI data and service URI, and links the repository data with the + corresponding parent data in our local database. + """ + + self.zoo.configure_repository(args.repository_xml, args.parent_handle) + self.zoo.synchronize_ca() + + + @parsecmd(argsubparsers, + cmdarg("repository_handle", help = "handle of repository to delete")) + def do_delete_repository(self, args): + """ + Delete a repository of this RPKI entity. + """ + + try: + self.zoo.delete_repository(args.repository_handle) + self.zoo.synchronize_ca() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.Repository.DoesNotExist: + print "No such repository \"%s\"" % args.repository_handle + + def complete_delete_repository(self, *args): + return self.irdb_handle_complete(self.zoo.resource_ca.repositories, *args) + + + @parsecmd(argsubparsers) + def do_delete_identity(self, args): + """ + Delete the current RPKI identity (rpkid <self/> object). + """ + + try: + self.zoo.delete_self() + self.zoo.synchronize_deleted_ca() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + + + @parsecmd(argsubparsers, + cmdarg("--valid_until", help = "override default new validity interval"), + cmdarg("child_handle", help = "handle of child to renew")) + def do_renew_child(self, args): + """ + Update validity period for one child entity. + """ + + self.zoo.renew_children(args.child_handle, args.valid_until) + self.zoo.synchronize_ca() + if self.autosync: + self.zoo.run_rpkid_now() + + def complete_renew_child(self, *args): + return self.irdb_handle_complete(self.zoo.resource_ca.children, *args) + + + @parsecmd(argsubparsers, + cmdarg("--valid_until", help = "override default new validity interval")) + def do_renew_all_children(self, args): + """ + Update validity period for all child entities. 
+ """ + + self.zoo.renew_children(None, args.valid_until) + self.zoo.synchronize_ca() + if self.autosync: + self.zoo.run_rpkid_now() + + + @parsecmd(argsubparsers, + cmdarg("prefixes_csv", help = "CSV file listing prefixes")) + def do_load_prefixes(self, args): + """ + Load prefixes into IRDB from CSV file. + """ + + self.zoo.load_prefixes(args.prefixes_csv, True) + if self.autosync: + self.zoo.run_rpkid_now() + + + @parsecmd(argsubparsers) + def do_show_child_resources(self, args): + """ + Show resources assigned to children. + """ + + for child in self.zoo.resource_ca.children.all(): + resources = child.resource_bag + print "Child:", child.handle + if resources.asn: + print " ASN:", resources.asn + if resources.v4: + print " IPv4:", resources.v4 + if resources.v6: + print " IPv6:", resources.v6 + + + @parsecmd(argsubparsers) + def do_show_roa_requests(self, args): + """ + Show ROA requests. + """ + + for roa_request in self.zoo.resource_ca.roa_requests.all(): + prefixes = roa_request.roa_prefix_bag + print "ASN: ", roa_request.asn + if prefixes.v4: + print " IPv4:", prefixes.v4 + if prefixes.v6: + print " IPv6:", prefixes.v6 + + + @parsecmd(argsubparsers) + def do_show_ghostbuster_requests(self, args): + """ + Show Ghostbuster requests. + """ + + for ghostbuster_request in self.zoo.resource_ca.ghostbuster_requests.all(): + print "Parent:", ghostbuster_request.parent or "*" + print ghostbuster_request.vcard + + + @parsecmd(argsubparsers) + def do_show_received_resources(self, args): + """ + Show resources received by this entity from its parent(s). + """ + + for pdu in self.zoo.call_rpkid( + rpki.left_right.list_received_resources_elt.make_pdu(self_handle = self.zoo.handle)): + + print "Parent: ", pdu.parent_handle + print " notBefore:", pdu.notBefore + print " notAfter: ", pdu.notAfter + print " URI: ", pdu.uri + print " SIA URI: ", pdu.sia_uri + print " AIA URI: ", pdu.aia_uri + print " ASN: ", pdu.asn + print " IPv4: ", pdu.ipv4 + print " IPv6: ", pdu.ipv6 + + + @parsecmd(argsubparsers) + def do_show_published_objects(self, args): + """ + Show published objects. + """ + + for pdu in self.zoo.call_rpkid( + rpki.left_right.list_published_objects_elt.make_pdu(self_handle = self.zoo.handle)): + + track = rpki.x509.uri_dispatch(pdu.uri)(Base64 = pdu.obj).tracking_data(pdu.uri) + child = pdu.child_handle + + if child is None: + print track + else: + print track, child + + + @parsecmd(argsubparsers) + def do_show_bpki(self, args): + """ + Show this entity's BPKI objects. 
+ """ + + print "Self: ", self.zoo.resource_ca.handle + print " notBefore:", self.zoo.resource_ca.certificate.getNotBefore() + print " notAfter: ", self.zoo.resource_ca.certificate.getNotAfter() + print " Subject: ", self.zoo.resource_ca.certificate.getSubject() + print " SKI: ", self.zoo.resource_ca.certificate.hSKI() + for bsc in self.zoo.resource_ca.bscs.all(): + print "BSC: ", bsc.handle + print " notBefore:", bsc.certificate.getNotBefore() + print " notAfter: ", bsc.certificate.getNotAfter() + print " Subject: ", bsc.certificate.getSubject() + print " SKI: ", bsc.certificate.hSKI() + for parent in self.zoo.resource_ca.parents.all(): + print "Parent: ", parent.handle + print " notBefore:", parent.certificate.getNotBefore() + print " notAfter: ", parent.certificate.getNotAfter() + print " Subject: ", parent.certificate.getSubject() + print " SKI: ", parent.certificate.hSKI() + print " URL: ", parent.service_uri + for child in self.zoo.resource_ca.children.all(): + print "Child: ", child.handle + print " notBefore:", child.certificate.getNotBefore() + print " notAfter: ", child.certificate.getNotAfter() + print " Subject: ", child.certificate.getSubject() + print " SKI: ", child.certificate.hSKI() + for repository in self.zoo.resource_ca.repositories.all(): + print "Repository: ", repository.handle + print " notBefore:", repository.certificate.getNotBefore() + print " notAfter: ", repository.certificate.getNotAfter() + print " Subject: ", repository.certificate.getSubject() + print " SKI: ", repository.certificate.hSKI() + print " URL: ", repository.service_uri + + + @parsecmd(argsubparsers, + cmdarg("asns_csv", help = "CSV file listing ASNs")) + def do_load_asns(self, args): + """ + Load ASNs into IRDB from CSV file. + """ + + self.zoo.load_asns(args.asns_csv, True) + if self.autosync: + self.zoo.run_rpkid_now() + + + @parsecmd(argsubparsers, + cmdarg("roa_requests_csv", help = "CSV file listing ROA requests")) + def do_load_roa_requests(self, args): + """ + Load ROA requests into IRDB from CSV file. + """ + + self.zoo.load_roa_requests(args.roa_requests_csv) + if self.autosync: + self.zoo.run_rpkid_now() + + + @parsecmd(argsubparsers, + cmdarg("ghostbuster_requests", help = "file listing Ghostbuster requests as a sequence of VCards")) + def do_load_ghostbuster_requests(self, args): + """ + Load Ghostbuster requests into IRDB from file. + """ + + self.zoo.load_ghostbuster_requests(args.ghostbuster_requests) + if self.autosync: + self.zoo.run_rpkid_now() + + + @parsecmd(argsubparsers, + cmdarg("--valid_until", help = "override default validity interval"), + cmdarg("router_certificate_request_xml", help = "file containing XML router certificate request")) + def do_add_router_certificate_request(self, args): + """ + Load router certificate request(s) into IRDB from XML file. + """ + + self.zoo.add_router_certificate_request(args.router_certificate_request_xml, args.valid_until) + if self.autosync: + self.zoo.run_rpkid_now() + + @parsecmd(argsubparsers, + cmdarg("gski", help = "g(SKI) of router certificate request to delete")) + def do_delete_router_certificate_request(self, args): + """ + Delete a router certificate request from the IRDB. 
+    """
+
+    try:
+      self.zoo.delete_router_certificate_request(args.gski)
+      if self.autosync:
+        self.zoo.run_rpkid_now()
+    except rpki.irdb.ResourceHolderCA.DoesNotExist:
+      print "No such resource holder \"%s\"" % self.zoo.handle
+    except rpki.irdb.EECertificateRequest.DoesNotExist:
+      print "No certificate request matching g(SKI) \"%s\"" % args.gski
+
+  def complete_delete_router_certificate_request(self, text, line, begidx, endidx):
+    return [obj.gski for obj in self.zoo.resource_ca.ee_certificate_requests.all()
+            if obj.gski and obj.gski.startswith(text)]
+
+
+  @parsecmd(argsubparsers)
+  def do_show_router_certificate_requests(self, args):
+    """
+    Show this entity's router certificate requests.
+    """
+
+    for req in self.zoo.resource_ca.ee_certificate_requests.all():
+      print "%s %s %s %s" % (req.gski, req.valid_until, req.cn, req.sn)
+
+
+  # What about updates? Validity interval, change router-id, change
+  # ASNs. Not sure what this looks like yet, blunder ahead with the
+  # core code while mulling over the UI.
+
+
+  @parsecmd(argsubparsers)
+  def do_synchronize(self, args):
+    """
+    Whack daemons to match IRDB.
+
+    This command may be replaced by implicit synchronization embedded
+    in other commands; haven't decided yet.
+    """
+
+    self.zoo.synchronize()
+
+
+  @parsecmd(argsubparsers)
+  def do_force_publication(self, args):
+    """
+    Whack rpkid to force (re)publication of everything.
+
+    This is not usually necessary, as rpkid automatically publishes
+    changes it makes, but this command can be useful occasionally when
+    a fault or configuration error has left rpkid holding data which
+    it has not been able to publish.
+    """
+
+    self.zoo.publish_world_now()
+
+
+  @parsecmd(argsubparsers)
+  def do_force_reissue(self, args):
+    """
+    Whack rpkid to force reissuance of everything.
+
+    This is not usually necessary, as rpkid reissues objects
+    automatically as needed, but this command can be useful
+    occasionally when a fault or configuration error has prevented
+    rpkid from reissuing when it should have.
+    """
+
+    self.zoo.reissue()
+
+
+  @parsecmd(argsubparsers)
+  def do_up_down_rekey(self, args):
+    """
+    Initiate a "rekey" operation.
+
+    This tells rpkid to generate new keys for each certificate issued
+    to it via the up-down protocol.
+
+    Rekeying is the first stage of a key rollover operation. You will
+    need to follow it up later with a "revoke" operation to clean up
+    the old keys.
+    """
+
+    self.zoo.rekey()
+
+
+  @parsecmd(argsubparsers)
+  def do_up_down_revoke(self, args):
+    """
+    Initiate a "revoke" operation.
+
+    This tells rpkid to clean up old keys formerly used by
+    certificates issued to it via the up-down protocol.
+
+    This is the cleanup stage of a key rollover operation.
+    """
+
+    self.zoo.revoke()
+
+
+  @parsecmd(argsubparsers)
+  def do_revoke_forgotten(self, args):
+    """
+    Initiate a "revoke_forgotten" operation.
+
+    This tells rpkid to ask its parent to revoke certificates for
+    which rpkid does not know the private keys.
+
+    This should never happen during ordinary operation, but can happen
+    if rpkid is misconfigured or its database has been damaged, so we
+    need a way to resynchronize rpkid with its parent in such cases.
+    We could do this automatically, but as we don't know the precise
+    cause of the failure we don't know if it's recoverable locally
+    (eg, from an SQL backup), so we require a manual trigger before
+    discarding possibly-useful certificates.
+ """ + + self.zoo.revoke_forgotten() + + + @parsecmd(argsubparsers) + def do_clear_all_sql_cms_replay_protection(self, args): + """ + Tell rpkid and pubd to clear replay protection. + + This clears the replay protection timestamps stored in SQL for all + entities known to rpkid and pubd. This is a fairly blunt + instrument, but as we don't expect this to be necessary except in + the case of gross misconfiguration, it should suffice + """ + + self.zoo.clear_all_sql_cms_replay_protection() + + + @parsecmd(argsubparsers) + def do_version(self, args): + """ + Show current software version number. + """ + + print rpki.version.VERSION + + + @parsecmd(argsubparsers) + def do_list_self_handles(self, args): + """ + List all <self/> handles in this rpkid instance. + """ + + for ca in rpki.irdb.ResourceHolderCA.objects.all(): + print ca.handle + diff --git a/rpki/rpkid.py b/rpki/rpkid.py new file mode 100644 index 00000000..d6163bee --- /dev/null +++ b/rpki/rpkid.py @@ -0,0 +1,2500 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +RPKI CA engine. +""" + +import os +import time +import argparse +import sys +import re +import random +import base64 +import rpki.resource_set +import rpki.up_down +import rpki.left_right +import rpki.x509 +import rpki.sql +import rpki.http +import rpki.config +import rpki.exceptions +import rpki.relaxng +import rpki.log +import rpki.async +import rpki.daemonize +import rpki.rpkid_tasks + +class main(object): + """ + Main program for rpkid. 
+ """ + + def __init__(self): + + os.environ["TZ"] = "UTC" + time.tzset() + + self.irdbd_cms_timestamp = None + self.irbe_cms_timestamp = None + self.task_current = None + self.task_queue = [] + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "override default location of configuration file") + parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging mode") + parser.add_argument("-f", "--foreground", action = "store_true", + help = "do not daemonize") + parser.add_argument("--pidfile", + help = "override default location of pid file") + parser.add_argument("--profile", + help = "enable profiling, saving data to PROFILE") + args = parser.parse_args() + + self.profile = args.profile + + rpki.log.init("rpkid", use_syslog = not args.debug) + + self.cfg = rpki.config.parser(args.config, "rpkid") + self.cfg.set_global_flags() + + if not args.foreground and not args.debug: + rpki.daemonize.daemon(pidfile = args.pidfile) + + if self.profile: + import cProfile + prof = cProfile.Profile() + try: + prof.runcall(self.main) + finally: + prof.dump_stats(self.profile) + rpki.log.info("Dumped profile data to %s" % self.profile) + else: + self.main() + + def main(self): + + startup_msg = self.cfg.get("startup-message", "") + if startup_msg: + rpki.log.info(startup_msg) + + if self.profile: + rpki.log.info("Running in profile mode with output to %s" % self.profile) + + self.sql = rpki.sql.session(self.cfg) + + self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta")) + self.irdb_cert = rpki.x509.X509(Auto_update = self.cfg.get("irdb-cert")) + self.irbe_cert = rpki.x509.X509(Auto_update = self.cfg.get("irbe-cert")) + self.rpkid_cert = rpki.x509.X509(Auto_update = self.cfg.get("rpkid-cert")) + self.rpkid_key = rpki.x509.RSA( Auto_update = self.cfg.get("rpkid-key")) + + self.irdb_url = self.cfg.get("irdb-url") + + self.http_server_host = self.cfg.get("server-host", "") + self.http_server_port = self.cfg.getint("server-port") + + self.publication_kludge_base = self.cfg.get("publication-kludge-base", "publication/") + + # Icky hack to let Iain do some testing quickly, should go away + # once we sort out whether we can make this change permanent. + # + # OK, the stuff to add router certificate support makes enough + # other changes that we're going to need a migration program in + # any case, so might as well throw the switch here too, or at + # least find out if it (still) works as expected. + + self.merge_publication_directories = self.cfg.getboolean("merge_publication_directories", + True) + + self.use_internal_cron = self.cfg.getboolean("use-internal-cron", True) + + self.initial_delay = random.randint(self.cfg.getint("initial-delay-min", 10), + self.cfg.getint("initial-delay-max", 120)) + + # Should be much longer in production + self.cron_period = rpki.sundial.timedelta(seconds = self.cfg.getint("cron-period", 120)) + self.cron_keepalive = rpki.sundial.timedelta(seconds = self.cfg.getint("cron-keepalive", 0)) + if not self.cron_keepalive: + self.cron_keepalive = self.cron_period * 4 + self.cron_timeout = None + + self.start_cron() + + rpki.http.server( + host = self.http_server_host, + port = self.http_server_port, + handlers = (("/left-right", self.left_right_handler), + ("/up-down/", self.up_down_handler), + ("/cronjob", self.cronjob_handler))) + + + def start_cron(self): + """ + Start clock for rpkid's internal cron process. 
+ """ + + if self.use_internal_cron: + self.cron_timer = rpki.async.timer(handler = self.cron) + when = rpki.sundial.now() + rpki.sundial.timedelta(seconds = self.initial_delay) + rpki.log.debug("Scheduling initial cron pass at %s" % when) + self.cron_timer.set(when) + else: + rpki.log.debug("Not using internal clock, start_cron() call ignored") + + def irdb_query(self, callback, errback, *q_pdus, **kwargs): + """ + Perform an IRDB callback query. + """ + + rpki.log.trace() + + try: + q_types = tuple(type(q_pdu) for q_pdu in q_pdus) + + expected_pdu_count = kwargs.pop("expected_pdu_count", None) + assert len(kwargs) == 0 + + q_msg = rpki.left_right.msg.query() + q_msg.extend(q_pdus) + q_der = rpki.left_right.cms_msg().wrap(q_msg, self.rpkid_key, self.rpkid_cert) + + def unwrap(r_der): + try: + r_cms = rpki.left_right.cms_msg(DER = r_der) + r_msg = r_cms.unwrap((self.bpki_ta, self.irdb_cert)) + self.irdbd_cms_timestamp = r_cms.check_replay(self.irdbd_cms_timestamp, self.irdb_url) + if not r_msg.is_reply() or not all(type(r_pdu) in q_types for r_pdu in r_msg): + raise rpki.exceptions.BadIRDBReply( + "Unexpected response to IRDB query: %s" % r_cms.pretty_print_content()) + if expected_pdu_count is not None and len(r_msg) != expected_pdu_count: + assert isinstance(expected_pdu_count, (int, long)) + raise rpki.exceptions.BadIRDBReply( + "Expected exactly %d PDU%s from IRDB: %s" % ( + expected_pdu_count, "" if expected_pdu_count == 1 else "s", + r_cms.pretty_print_content())) + callback(r_msg) + except Exception, e: + errback(e) + + rpki.http.client( + url = self.irdb_url, + msg = q_der, + callback = unwrap, + errback = errback) + + except Exception, e: + errback(e) + + + def irdb_query_child_resources(self, self_handle, child_handle, callback, errback): + """ + Ask IRDB about a child's resources. + """ + + rpki.log.trace() + + q_pdu = rpki.left_right.list_resources_elt() + q_pdu.self_handle = self_handle + q_pdu.child_handle = child_handle + + def done(r_msg): + callback(rpki.resource_set.resource_bag( + asn = r_msg[0].asn, + v4 = r_msg[0].ipv4, + v6 = r_msg[0].ipv6, + valid_until = r_msg[0].valid_until)) + + self.irdb_query(done, errback, q_pdu, expected_pdu_count = 1) + + def irdb_query_roa_requests(self, self_handle, callback, errback): + """ + Ask IRDB about self's ROA requests. + """ + + rpki.log.trace() + + q_pdu = rpki.left_right.list_roa_requests_elt() + q_pdu.self_handle = self_handle + + self.irdb_query(callback, errback, q_pdu) + + def irdb_query_ghostbuster_requests(self, self_handle, parent_handles, callback, errback): + """ + Ask IRDB about self's ghostbuster record requests. + """ + + rpki.log.trace() + + q_pdus = [] + + for parent_handle in parent_handles: + q_pdu = rpki.left_right.list_ghostbuster_requests_elt() + q_pdu.self_handle = self_handle + q_pdu.parent_handle = parent_handle + q_pdus.append(q_pdu) + + self.irdb_query(callback, errback, *q_pdus) + + def irdb_query_ee_certificate_requests(self, self_handle, callback, errback): + """ + Ask IRDB about self's EE certificate requests. + """ + + rpki.log.trace() + + q_pdu = rpki.left_right.list_ee_certificate_requests_elt() + q_pdu.self_handle = self_handle + + self.irdb_query(callback, errback, q_pdu) + + def left_right_handler(self, query, path, cb): + """ + Process one left-right PDU. 
+ """ + + rpki.log.trace() + + def done(r_msg): + reply = rpki.left_right.cms_msg().wrap(r_msg, self.rpkid_key, self.rpkid_cert) + self.sql.sweep() + cb(200, body = reply) + + try: + q_cms = rpki.left_right.cms_msg(DER = query) + q_msg = q_cms.unwrap((self.bpki_ta, self.irbe_cert)) + self.irbe_cms_timestamp = q_cms.check_replay(self.irbe_cms_timestamp, path) + if not q_msg.is_query(): + raise rpki.exceptions.BadQuery, "Message type is not query" + q_msg.serve_top_level(self, done) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) + + up_down_url_regexp = re.compile("/up-down/([-A-Z0-9_]+)/([-A-Z0-9_]+)$", re.I) + + def up_down_handler(self, query, path, cb): + """ + Process one up-down PDU. + """ + + rpki.log.trace() + + def done(reply): + self.sql.sweep() + cb(200, body = reply) + + try: + match = self.up_down_url_regexp.search(path) + if match is None: + raise rpki.exceptions.BadContactURL, "Bad URL path received in up_down_handler(): %s" % path + self_handle, child_handle = match.groups() + child = rpki.left_right.child_elt.sql_fetch_where1(self, "self.self_handle = %s AND child.child_handle = %s AND child.self_id = self.self_id", + (self_handle, child_handle), "self") + if child is None: + raise rpki.exceptions.ChildNotFound, "Could not find child %s of self %s in up_down_handler()" % (child_handle, self_handle) + child.serve_up_down(query, done) + except (rpki.async.ExitNow, SystemExit): + raise + except (rpki.exceptions.ChildNotFound, rpki.exceptions.BadContactURL), e: + rpki.log.warn(str(e)) + cb(400, reason = str(e)) + except Exception, e: + rpki.log.traceback() + cb(400, reason = "Could not process PDU: %s" % e) + + def checkpoint(self, force = False): + """ + Record that we were still alive when we got here, by resetting + keepalive timer. + """ + if force or self.cron_timeout is not None: + self.cron_timeout = rpki.sundial.now() + self.cron_keepalive + + def task_add(self, task): + """ + Add a task to the scheduler task queue, unless it's already queued. + """ + if task not in self.task_queue: + rpki.log.debug("Adding %r to task queue" % task) + self.task_queue.append(task) + return True + else: + rpki.log.debug("Task %r was already in the task queue" % task) + return False + + def task_next(self): + """ + Pull next task from the task queue and put it the deferred event + queue (we don't want to run it directly, as that could eventually + blow out our call stack). + """ + try: + self.task_current = self.task_queue.pop(0) + except IndexError: + self.task_current = None + else: + rpki.async.event_defer(self.task_current) + + def task_run(self): + """ + Run first task on the task queue, unless one is running already. + """ + if self.task_current is None: + self.task_next() + + def cron(self, cb = None): + """ + Periodic tasks. + """ + + rpki.log.trace() + + now = rpki.sundial.now() + + rpki.log.debug("Starting cron run") + + def done(): + self.sql.sweep() + self.cron_timeout = None + rpki.log.info("Finished cron run started at %s" % now) + if cb is not None: + cb() + + completion = rpki.rpkid_tasks.CompletionHandler(done) + try: + selves = rpki.left_right.self_elt.sql_fetch_all(self) + except Exception, e: + rpki.log.warn("Error pulling self_elts from SQL, maybe SQL server is down? 
(%s)" % e) + else: + for s in selves: + s.schedule_cron_tasks(completion) + nothing_queued = completion.count == 0 + + assert self.use_internal_cron or self.cron_timeout is None + + if self.cron_timeout is not None and self.cron_timeout < now: + rpki.log.warn("cron keepalive threshold %s has expired, breaking lock" % self.cron_timeout) + self.cron_timeout = None + + if self.use_internal_cron: + when = now + self.cron_period + rpki.log.debug("Scheduling next cron run at %s" % when) + self.cron_timer.set(when) + + if self.cron_timeout is None: + self.checkpoint(self.use_internal_cron) + self.task_run() + + elif self.use_internal_cron: + rpki.log.warn("cron already running, keepalive will expire at %s" % self.cron_timeout) + + if nothing_queued: + done() + + def cronjob_handler(self, query, path, cb): + """ + External trigger for periodic tasks. This is somewhat obsolete + now that we have internal timers, but the test framework still + uses it. + """ + + def done(): + cb(200, body = "OK") + + if self.use_internal_cron: + cb(500, reason = "Running cron internally") + else: + rpki.log.debug("Starting externally triggered cron") + self.cron(done) + +class ca_obj(rpki.sql.sql_persistent): + """ + Internal CA object. + """ + + sql_template = rpki.sql.template( + "ca", + "ca_id", + "last_crl_sn", + ("next_crl_update", rpki.sundial.datetime), + "last_issued_sn", + "last_manifest_sn", + ("next_manifest_update", rpki.sundial.datetime), + "sia_uri", + "parent_id", + "parent_resource_class") + + last_crl_sn = 0 + last_issued_sn = 0 + last_manifest_sn = 0 + + def __repr__(self): + return rpki.log.log_repr(self, repr(self.parent), self.parent_resource_class) + + @property + @rpki.sql.cache_reference + def parent(self): + """ + Fetch parent object to which this CA object links. + """ + return rpki.left_right.parent_elt.sql_fetch(self.gctx, self.parent_id) + + @property + def ca_details(self): + """ + Fetch all ca_detail objects that link to this CA object. + """ + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s", (self.ca_id,)) + + @property + def pending_ca_details(self): + """ + Fetch the pending ca_details for this CA, if any. + """ + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'pending'", (self.ca_id,)) + + @property + def active_ca_detail(self): + """ + Fetch the active ca_detail for this CA, if any. + """ + return ca_detail_obj.sql_fetch_where1(self.gctx, "ca_id = %s AND state = 'active'", (self.ca_id,)) + + @property + def deprecated_ca_details(self): + """ + Fetch deprecated ca_details for this CA, if any. + """ + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'deprecated'", (self.ca_id,)) + + @property + def active_or_deprecated_ca_details(self): + """ + Fetch active and deprecated ca_details for this CA, if any. + """ + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND (state = 'active' OR state = 'deprecated')", (self.ca_id,)) + + @property + def revoked_ca_details(self): + """ + Fetch revoked ca_details for this CA, if any. + """ + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'revoked'", (self.ca_id,)) + + @property + def issue_response_candidate_ca_details(self): + """ + Fetch ca_details which are candidates for consideration when + processing an up-down issue_response PDU. 
+ """ + #return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND latest_ca_cert IS NOT NULL AND state != 'revoked'", (self.ca_id,)) + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state != 'revoked'", (self.ca_id,)) + + def construct_sia_uri(self, parent, rc): + """ + Construct the sia_uri value for this CA given configured + information and the parent's up-down protocol list_response PDU. + """ + + sia_uri = rc.suggested_sia_head and rc.suggested_sia_head.rsync() + if not sia_uri or not sia_uri.startswith(parent.sia_base): + sia_uri = parent.sia_base + if not sia_uri.endswith("/"): + raise rpki.exceptions.BadURISyntax, "SIA URI must end with a slash: %s" % sia_uri + # With luck this can go away sometime soon. + if self.gctx.merge_publication_directories: + return sia_uri + else: + return sia_uri + str(self.ca_id) + "/" + + def check_for_updates(self, parent, rc, cb, eb): + """ + Parent has signaled continued existance of a resource class we + already knew about, so we need to check for an updated + certificate, changes in resource coverage, revocation and reissue + with the same key, etc. + """ + + sia_uri = self.construct_sia_uri(parent, rc) + sia_uri_changed = self.sia_uri != sia_uri + if sia_uri_changed: + rpki.log.debug("SIA changed: was %s now %s" % (self.sia_uri, sia_uri)) + self.sia_uri = sia_uri + self.sql_mark_dirty() + + rc_resources = rc.to_resource_bag() + cert_map = dict((c.cert.get_SKI(), c) for c in rc.certs) + + def loop(iterator, ca_detail): + + self.gctx.checkpoint() + + rc_cert = cert_map.pop(ca_detail.public_key.get_SKI(), None) + + if rc_cert is None: + + rpki.log.warn("SKI %s in resource class %s is in database but missing from list_response to %s from %s, maybe parent certificate went away?" + % (ca_detail.public_key.gSKI(), rc.class_name, parent.self.self_handle, parent.parent_handle)) + publisher = publication_queue() + ca_detail.delete(ca = ca_detail.ca, publisher = publisher) + return publisher.call_pubd(iterator, eb) + + else: + + if ca_detail.state == "active" and ca_detail.ca_cert_uri != rc_cert.cert_url.rsync(): + rpki.log.debug("AIA changed: was %s now %s" % (ca_detail.ca_cert_uri, rc_cert.cert_url.rsync())) + ca_detail.ca_cert_uri = rc_cert.cert_url.rsync() + ca_detail.sql_mark_dirty() + + if ca_detail.state in ("pending", "active"): + + if ca_detail.state == "pending": + current_resources = rpki.resource_set.resource_bag() + else: + current_resources = ca_detail.latest_ca_cert.get_3779resources() + + if (ca_detail.state == "pending" or + sia_uri_changed or + ca_detail.latest_ca_cert != rc_cert.cert or + ca_detail.latest_ca_cert.getNotAfter() != rc_resources.valid_until or + current_resources.undersized(rc_resources) or + current_resources.oversized(rc_resources)): + return ca_detail.update( + parent = parent, + ca = self, + rc = rc, + sia_uri_changed = sia_uri_changed, + old_resources = current_resources, + callback = iterator, + errback = eb) + + iterator() + + def done(): + if cert_map: + rpki.log.warn("Unknown certificate SKI%s %s in resource class %s in list_response " + "to %s from %s, maybe you want to \"revoke_forgotten\"?" 
+ % ("" if len(cert_map) == 1 else "s", + ", ".join(c.cert.gSKI() for c in cert_map.values()), + rc.class_name, parent.self.self_handle, parent.parent_handle)) + self.gctx.sql.sweep() + self.gctx.checkpoint() + cb() + + ca_details = self.issue_response_candidate_ca_details + + if True: + skis_parent = set(x.cert.gSKI() + for x in cert_map.itervalues()) + skis_me = set(x.latest_ca_cert.gSKI() + for x in ca_details + if x.latest_ca_cert is not None) + for ski in skis_parent & skis_me: + rpki.log.debug("Parent %s agrees that %s has SKI %s in resource class %s" + % (parent.parent_handle, parent.self.self_handle, ski, rc.class_name)) + for ski in skis_parent - skis_me: + rpki.log.debug("Parent %s thinks %s has SKI %s in resource class %s but I don't think so" + % (parent.parent_handle, parent.self.self_handle, ski, rc.class_name)) + for ski in skis_me - skis_parent: + rpki.log.debug("I think %s has SKI %s in resource class %s but parent %s doesn't think so" + % (parent.self.self_handle, ski, rc.class_name, parent.parent_handle)) + + if ca_details: + rpki.async.iterator(ca_details, loop, done) + else: + rpki.log.warn("Existing resource class %s to %s from %s with no certificates, rekeying" % + (rc.class_name, parent.self.self_handle, parent.parent_handle)) + self.gctx.checkpoint() + self.rekey(cb, eb) + + @classmethod + def create(cls, parent, rc, cb, eb): + """ + Parent has signaled existance of a new resource class, so we need + to create and set up a corresponding CA object. + """ + + self = cls() + self.gctx = parent.gctx + self.parent_id = parent.parent_id + self.parent_resource_class = rc.class_name + self.sql_store() + try: + self.sia_uri = self.construct_sia_uri(parent, rc) + except rpki.exceptions.BadURISyntax: + self.sql_delete() + raise + ca_detail = ca_detail_obj.create(self) + + def done(issue_response): + c = issue_response.payload.classes[0].certs[0] + rpki.log.debug("CA %r received certificate %s" % (self, c.cert_url)) + ca_detail.activate( + ca = self, + cert = c.cert, + uri = c.cert_url, + callback = cb, + errback = eb) + + rpki.log.debug("Sending issue request to %r from %r" % (parent, self.create)) + rpki.up_down.issue_pdu.query(parent, self, ca_detail, done, eb) + + def delete(self, parent, callback): + """ + The list of current resource classes received from parent does not + include the class corresponding to this CA, so we need to delete + it (and its little dog too...). + + All certs published by this CA are now invalid, so need to + withdraw them, the CRL, and the manifest from the repository, + delete all child_cert and ca_detail records associated with this + CA, then finally delete this CA itself. + """ + + def lose(e): + rpki.log.traceback() + rpki.log.warn("Could not delete CA %r, skipping: %s" % (self, e)) + callback() + + def done(): + rpki.log.debug("Deleting %r" % self) + self.sql_delete() + callback() + + publisher = publication_queue() + for ca_detail in self.ca_details: + ca_detail.delete(ca = self, publisher = publisher, allow_failure = True) + publisher.call_pubd(done, lose) + + def next_serial_number(self): + """ + Allocate a certificate serial number. + """ + self.last_issued_sn += 1 + self.sql_mark_dirty() + return self.last_issued_sn + + def next_manifest_number(self): + """ + Allocate a manifest serial number. + """ + self.last_manifest_sn += 1 + self.sql_mark_dirty() + return self.last_manifest_sn + + def next_crl_number(self): + """ + Allocate a CRL serial number. 
+ """ + self.last_crl_sn += 1 + self.sql_mark_dirty() + return self.last_crl_sn + + def rekey(self, cb, eb): + """ + Initiate a rekey operation for this ca. Generate a new keypair. + Request cert from parent using new keypair. Mark result as our + active ca_detail. Reissue all child certs issued by this ca using + the new ca_detail. + """ + + rpki.log.trace() + + parent = self.parent + old_detail = self.active_ca_detail + new_detail = ca_detail_obj.create(self) + + def done(issue_response): + c = issue_response.payload.classes[0].certs[0] + rpki.log.debug("CA %r received certificate %s" % (self, c.cert_url)) + new_detail.activate( + ca = self, + cert = c.cert, + uri = c.cert_url, + predecessor = old_detail, + callback = cb, + errback = eb) + + rpki.log.debug("Sending issue request to %r from %r" % (parent, self.rekey)) + rpki.up_down.issue_pdu.query(parent, self, new_detail, done, eb) + + def revoke(self, cb, eb, revoke_all = False): + """ + Revoke deprecated ca_detail objects associated with this CA, or + all ca_details associated with this CA if revoke_all is set. + """ + + rpki.log.trace() + + def loop(iterator, ca_detail): + ca_detail.revoke(cb = iterator, eb = eb) + + ca_details = self.ca_details if revoke_all else self.deprecated_ca_details + + rpki.async.iterator(ca_details, loop, cb) + + def reissue(self, cb, eb): + """ + Reissue all current certificates issued by this CA. + """ + + ca_detail = self.active_ca_detail + if ca_detail: + ca_detail.reissue(cb, eb) + else: + cb() + +class ca_detail_obj(rpki.sql.sql_persistent): + """ + Internal CA detail object. + """ + + sql_template = rpki.sql.template( + "ca_detail", + "ca_detail_id", + ("private_key_id", rpki.x509.RSA), + ("public_key", rpki.x509.PublicKey), + ("latest_ca_cert", rpki.x509.X509), + ("manifest_private_key_id", rpki.x509.RSA), + ("manifest_public_key", rpki.x509.PublicKey), + ("latest_manifest_cert", rpki.x509.X509), + ("latest_manifest", rpki.x509.SignedManifest), + ("latest_crl", rpki.x509.CRL), + ("crl_published", rpki.sundial.datetime), + ("manifest_published", rpki.sundial.datetime), + "state", + "ca_cert_uri", + "ca_id") + + crl_published = None + manifest_published = None + latest_ca_cert = None + latest_crl = None + latest_manifest = None + ca_cert_uri = None + + def __repr__(self): + return rpki.log.log_repr(self, repr(self.ca), self.state, self.ca_cert_uri) + + def sql_decode(self, vals): + """ + Extra assertions for SQL decode of a ca_detail_obj. + """ + rpki.sql.sql_persistent.sql_decode(self, vals) + assert self.public_key is None or self.private_key_id is None or self.public_key.get_DER() == self.private_key_id.get_public_DER() + assert self.manifest_public_key is None or self.manifest_private_key_id is None or self.manifest_public_key.get_DER() == self.manifest_private_key_id.get_public_DER() + + @property + @rpki.sql.cache_reference + def ca(self): + """ + Fetch CA object to which this ca_detail links. + """ + return ca_obj.sql_fetch(self.gctx, self.ca_id) + + def fetch_child_certs(self, child = None, ski = None, unique = False, unpublished = None): + """ + Fetch all child_cert objects that link to this ca_detail. + """ + return rpki.rpkid.child_cert_obj.fetch(self.gctx, child, self, ski, unique, unpublished) + + @property + def child_certs(self): + """ + Fetch all child_cert objects that link to this ca_detail. 
+ """ + return self.fetch_child_certs() + + def unpublished_child_certs(self, when): + """ + Fetch all unpublished child_cert objects linked to this ca_detail + with attempted publication dates older than when. + """ + return self.fetch_child_certs(unpublished = when) + + @property + def revoked_certs(self): + """ + Fetch all revoked_cert objects that link to this ca_detail. + """ + return revoked_cert_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) + + @property + def roas(self): + """ + Fetch all ROA objects that link to this ca_detail. + """ + return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) + + def unpublished_roas(self, when): + """ + Fetch all unpublished ROA objects linked to this ca_detail with + attempted publication dates older than when. + """ + return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s AND published IS NOT NULL and published < %s", (self.ca_detail_id, when)) + + @property + def ghostbusters(self): + """ + Fetch all Ghostbuster objects that link to this ca_detail. + """ + return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) + + @property + def ee_certificates(self): + """ + Fetch all EE certificate objects that link to this ca_detail. + """ + return rpki.rpkid.ee_cert_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) + + def unpublished_ghostbusters(self, when): + """ + Fetch all unpublished Ghostbusters objects linked to this + ca_detail with attempted publication dates older than when. + """ + return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s AND published IS NOT NULL and published < %s", (self.ca_detail_id, when)) + + @property + def crl_uri(self): + """ + Return publication URI for this ca_detail's CRL. + """ + return self.ca.sia_uri + self.crl_uri_tail + + @property + def crl_uri_tail(self): + """ + Return tail (filename portion) of publication URI for this ca_detail's CRL. + """ + return self.public_key.gSKI() + ".crl" + + @property + def manifest_uri(self): + """ + Return publication URI for this ca_detail's manifest. + """ + return self.ca.sia_uri + self.public_key.gSKI() + ".mft" + + def has_expired(self): + """ + Return whether this ca_detail's certificate has expired. + """ + return self.latest_ca_cert.getNotAfter() <= rpki.sundial.now() + + def covers(self, target): + """ + Test whether this ca-detail covers a given set of resources. + """ + + assert not target.asn.inherit and not target.v4.inherit and not target.v6.inherit + me = self.latest_ca_cert.get_3779resources() + return target.asn <= me.asn and target.v4 <= me.v4 and target.v6 <= me.v6 + + def activate(self, ca, cert, uri, callback, errback, predecessor = None): + """ + Activate this ca_detail. 
+ """ + + publisher = publication_queue() + + self.latest_ca_cert = cert + self.ca_cert_uri = uri.rsync() + self.generate_manifest_cert() + self.state = "active" + self.generate_crl(publisher = publisher) + self.generate_manifest(publisher = publisher) + self.sql_store() + + if predecessor is not None: + predecessor.state = "deprecated" + predecessor.sql_store() + for child_cert in predecessor.child_certs: + child_cert.reissue(ca_detail = self, publisher = publisher) + for roa in predecessor.roas: + roa.regenerate(publisher = publisher) + for ghostbuster in predecessor.ghostbusters: + ghostbuster.regenerate(publisher = publisher) + predecessor.generate_crl(publisher = publisher) + predecessor.generate_manifest(publisher = publisher) + + publisher.call_pubd(callback, errback) + + def delete(self, ca, publisher, allow_failure = False): + """ + Delete this ca_detail and all of the certs it issued. + + If allow_failure is true, we clean up as much as we can but don't + raise an exception. + """ + + repository = ca.parent.repository + handler = False if allow_failure else None + for child_cert in self.child_certs: + publisher.withdraw(cls = rpki.publication.certificate_elt, + uri = child_cert.uri, + obj = child_cert.cert, + repository = repository, + handler = handler) + child_cert.sql_mark_deleted() + for roa in self.roas: + roa.revoke(publisher = publisher, allow_failure = allow_failure, fast = True) + for ghostbuster in self.ghostbusters: + ghostbuster.revoke(publisher = publisher, allow_failure = allow_failure, fast = True) + try: + latest_manifest = self.latest_manifest + except AttributeError: + latest_manifest = None + if latest_manifest is not None: + publisher.withdraw(cls = rpki.publication.manifest_elt, + uri = self.manifest_uri, + obj = self.latest_manifest, + repository = repository, + handler = handler) + try: + latest_crl = self.latest_crl + except AttributeError: + latest_crl = None + if latest_crl is not None: + publisher.withdraw(cls = rpki.publication.crl_elt, + uri = self.crl_uri, + obj = self.latest_crl, + repository = repository, + handler = handler) + self.gctx.sql.sweep() + for cert in self.revoked_certs: # + self.child_certs + rpki.log.debug("Deleting %r" % cert) + cert.sql_delete() + rpki.log.debug("Deleting %r" % self) + self.sql_delete() + + def revoke(self, cb, eb): + """ + Request revocation of all certificates whose SKI matches the key + for this ca_detail. + + Tasks: + + - Request revocation of old keypair by parent. + + - Revoke all child certs issued by the old keypair. + + - Generate a final CRL, signed with the old keypair, listing all + the revoked certs, with a next CRL time after the last cert or + CRL signed by the old keypair will have expired. + + - Generate a corresponding final manifest. + + - Destroy old keypairs. + + - Leave final CRL and manifest in place until their nextupdate + time has passed. 
+ """ + + ca = self.ca + parent = ca.parent + + def parent_revoked(r_msg): + + if r_msg.payload.ski != self.latest_ca_cert.gSKI(): + raise rpki.exceptions.SKIMismatch + + rpki.log.debug("Parent revoked %s, starting cleanup" % self.latest_ca_cert.gSKI()) + + crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval) + + nextUpdate = rpki.sundial.now() + + if self.latest_manifest is not None: + self.latest_manifest.extract_if_needed() + nextUpdate = nextUpdate.later(self.latest_manifest.getNextUpdate()) + + if self.latest_crl is not None: + nextUpdate = nextUpdate.later(self.latest_crl.getNextUpdate()) + + publisher = publication_queue() + + for child_cert in self.child_certs: + nextUpdate = nextUpdate.later(child_cert.cert.getNotAfter()) + child_cert.revoke(publisher = publisher) + + for roa in self.roas: + nextUpdate = nextUpdate.later(roa.cert.getNotAfter()) + roa.revoke(publisher = publisher) + + for ghostbuster in self.ghostbusters: + nextUpdate = nextUpdate.later(ghostbuster.cert.getNotAfter()) + ghostbuster.revoke(publisher = publisher) + + nextUpdate += crl_interval + self.generate_crl(publisher = publisher, nextUpdate = nextUpdate) + self.generate_manifest(publisher = publisher, nextUpdate = nextUpdate) + self.private_key_id = None + self.manifest_private_key_id = None + self.manifest_public_key = None + self.latest_manifest_cert = None + self.state = "revoked" + self.sql_mark_dirty() + publisher.call_pubd(cb, eb) + + rpki.log.debug("Asking parent to revoke CA certificate %s" % self.latest_ca_cert.gSKI()) + rpki.up_down.revoke_pdu.query(ca, self.latest_ca_cert.gSKI(), parent_revoked, eb) + + def update(self, parent, ca, rc, sia_uri_changed, old_resources, callback, errback): + """ + Need to get a new certificate for this ca_detail and perhaps frob + children of this ca_detail. + """ + + def issued(issue_response): + c = issue_response.payload.classes[0].certs[0] + rpki.log.debug("CA %r received certificate %s" % (self, c.cert_url)) + + if self.state == "pending": + return self.activate( + ca = ca, + cert = c.cert, + uri = c.cert_url, + callback = callback, + errback = errback) + + validity_changed = self.latest_ca_cert is None or self.latest_ca_cert.getNotAfter() != c.cert.getNotAfter() + + publisher = publication_queue() + + if self.latest_ca_cert != c.cert: + self.latest_ca_cert = c.cert + self.sql_mark_dirty() + self.generate_manifest_cert() + self.generate_crl(publisher = publisher) + self.generate_manifest(publisher = publisher) + + new_resources = self.latest_ca_cert.get_3779resources() + + if sia_uri_changed or old_resources.oversized(new_resources): + for child_cert in self.child_certs: + child_resources = child_cert.cert.get_3779resources() + if sia_uri_changed or child_resources.oversized(new_resources): + child_cert.reissue( + ca_detail = self, + resources = child_resources & new_resources, + publisher = publisher) + + if sia_uri_changed or validity_changed or old_resources.oversized(new_resources): + for roa in self.roas: + roa.update(publisher = publisher, fast = True) + + if sia_uri_changed or validity_changed: + for ghostbuster in self.ghostbusters: + ghostbuster.update(publisher = publisher, fast = True) + + publisher.call_pubd(callback, errback) + + rpki.log.debug("Sending issue request to %r from %r" % (parent, self.update)) + rpki.up_down.issue_pdu.query(parent, ca, self, issued, errback) + + @classmethod + def create(cls, ca): + """ + Create a new ca_detail object for a specified CA. 
+ """ + self = cls() + self.gctx = ca.gctx + self.ca_id = ca.ca_id + self.state = "pending" + + self.private_key_id = rpki.x509.RSA.generate() + self.public_key = self.private_key_id.get_public() + + self.manifest_private_key_id = rpki.x509.RSA.generate() + self.manifest_public_key = self.manifest_private_key_id.get_public() + + self.sql_store() + return self + + def issue_ee(self, ca, resources, subject_key, sia, + cn = None, sn = None, notAfter = None, eku = None): + """ + Issue a new EE certificate. + """ + + if notAfter is None: + notAfter = self.latest_ca_cert.getNotAfter() + + return self.latest_ca_cert.issue( + keypair = self.private_key_id, + subject_key = subject_key, + serial = ca.next_serial_number(), + sia = sia, + aia = self.ca_cert_uri, + crldp = self.crl_uri, + resources = resources, + notAfter = notAfter, + is_ca = False, + cn = cn, + sn = sn, + eku = eku) + + def generate_manifest_cert(self): + """ + Generate a new manifest certificate for this ca_detail. + """ + + resources = rpki.resource_set.resource_bag.from_inheritance() + self.latest_manifest_cert = self.issue_ee( + ca = self.ca, + resources = resources, + subject_key = self.manifest_public_key, + sia = (None, None, self.manifest_uri)) + + def issue(self, ca, child, subject_key, sia, resources, publisher, child_cert = None): + """ + Issue a new certificate to a child. Optional child_cert argument + specifies an existing child_cert object to update in place; if not + specified, we create a new one. Returns the child_cert object + containing the newly issued cert. + """ + + self.check_failed_publication(publisher) + + assert child_cert is None or child_cert.child_id == child.child_id + + cert = self.latest_ca_cert.issue( + keypair = self.private_key_id, + subject_key = subject_key, + serial = ca.next_serial_number(), + aia = self.ca_cert_uri, + crldp = self.crl_uri, + sia = sia, + resources = resources, + notAfter = resources.valid_until) + + if child_cert is None: + child_cert = rpki.rpkid.child_cert_obj( + gctx = child.gctx, + child_id = child.child_id, + ca_detail_id = self.ca_detail_id, + cert = cert) + rpki.log.debug("Created new child_cert %r" % child_cert) + else: + child_cert.cert = cert + del child_cert.ca_detail + child_cert.ca_detail_id = self.ca_detail_id + rpki.log.debug("Reusing existing child_cert %r" % child_cert) + + child_cert.ski = cert.get_SKI() + child_cert.published = rpki.sundial.now() + child_cert.sql_store() + publisher.publish( + cls = rpki.publication.certificate_elt, + uri = child_cert.uri, + obj = child_cert.cert, + repository = ca.parent.repository, + handler = child_cert.published_callback) + self.generate_manifest(publisher = publisher) + return child_cert + + def generate_crl(self, publisher, nextUpdate = None): + """ + Generate a new CRL for this ca_detail. At the moment this is + unconditional, that is, it is up to the caller to decide whether a + new CRL is needed. 
+ """ + + self.check_failed_publication(publisher) + + ca = self.ca + parent = ca.parent + crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval) + now = rpki.sundial.now() + + if nextUpdate is None: + nextUpdate = now + crl_interval + + certlist = [] + for revoked_cert in self.revoked_certs: + if now > revoked_cert.expires + crl_interval: + revoked_cert.sql_delete() + else: + certlist.append((revoked_cert.serial, revoked_cert.revoked)) + certlist.sort() + + self.latest_crl = rpki.x509.CRL.generate( + keypair = self.private_key_id, + issuer = self.latest_ca_cert, + serial = ca.next_crl_number(), + thisUpdate = now, + nextUpdate = nextUpdate, + revokedCertificates = certlist) + + self.crl_published = rpki.sundial.now() + self.sql_mark_dirty() + publisher.publish( + cls = rpki.publication.crl_elt, + uri = self.crl_uri, + obj = self.latest_crl, + repository = parent.repository, + handler = self.crl_published_callback) + + def crl_published_callback(self, pdu): + """ + Check result of CRL publication. + """ + pdu.raise_if_error() + self.crl_published = None + self.sql_mark_dirty() + + def generate_manifest(self, publisher, nextUpdate = None): + """ + Generate a new manifest for this ca_detail. + """ + + self.check_failed_publication(publisher) + + ca = self.ca + parent = ca.parent + crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval) + now = rpki.sundial.now() + uri = self.manifest_uri + + if nextUpdate is None: + nextUpdate = now + crl_interval + + if self.latest_manifest_cert is None or self.latest_manifest_cert.getNotAfter() < nextUpdate: + rpki.log.debug("Generating EE certificate for %s" % uri) + self.generate_manifest_cert() + rpki.log.debug("Latest CA cert notAfter %s, new %s EE notAfter %s" % ( + self.latest_ca_cert.getNotAfter(), uri, self.latest_manifest_cert.getNotAfter())) + + rpki.log.debug("Constructing manifest object list for %s" % uri) + objs = [(self.crl_uri_tail, self.latest_crl)] + objs.extend((c.uri_tail, c.cert) for c in self.child_certs) + objs.extend((r.uri_tail, r.roa) for r in self.roas if r.roa is not None) + objs.extend((g.uri_tail, g.ghostbuster) for g in self.ghostbusters) + objs.extend((e.uri_tail, e.cert) for e in self.ee_certificates) + + rpki.log.debug("Building manifest object %s" % uri) + self.latest_manifest = rpki.x509.SignedManifest.build( + serial = ca.next_manifest_number(), + thisUpdate = now, + nextUpdate = nextUpdate, + names_and_objs = objs, + keypair = self.manifest_private_key_id, + certs = self.latest_manifest_cert) + + rpki.log.debug("Manifest generation took %s" % (rpki.sundial.now() - now)) + + self.manifest_published = rpki.sundial.now() + self.sql_mark_dirty() + publisher.publish(cls = rpki.publication.manifest_elt, + uri = uri, + obj = self.latest_manifest, + repository = parent.repository, + handler = self.manifest_published_callback) + + def manifest_published_callback(self, pdu): + """ + Check result of manifest publication. + """ + pdu.raise_if_error() + self.manifest_published = None + self.sql_mark_dirty() + + def reissue(self, cb, eb): + """ + Reissue all current certificates issued by this ca_detail. 
+ """ + + publisher = publication_queue() + self.check_failed_publication(publisher) + for roa in self.roas: + roa.regenerate(publisher, fast = True) + for ghostbuster in self.ghostbusters: + ghostbuster.regenerate(publisher, fast = True) + for ee_certificate in self.ee_certificates: + ee_certificate.reissue(publisher, force = True) + for child_cert in self.child_certs: + child_cert.reissue(self, publisher, force = True) + self.gctx.sql.sweep() + self.generate_manifest_cert() + self.sql_mark_dirty() + self.generate_crl(publisher = publisher) + self.generate_manifest(publisher = publisher) + self.gctx.sql.sweep() + publisher.call_pubd(cb, eb) + + def check_failed_publication(self, publisher, check_all = True): + """ + Check for failed publication of objects issued by this ca_detail. + + All publishable objects have timestamp fields recording time of + last attempted publication, and callback methods which clear these + timestamps once publication has succeeded. Our task here is to + look for objects issued by this ca_detail which have timestamps + set (indicating that they have not been published) and for which + the timestamps are not very recent (for some definition of very + recent -- intent is to allow a bit of slack in case pubd is just + being slow). In such cases, we want to retry publication. + + As an optimization, we can probably skip checking other products + if manifest and CRL have been published, thus saving ourselves + several complex SQL queries. Not sure yet whether this + optimization is worthwhile. + + For the moment we check everything without optimization, because + it simplifies testing. + + For the moment our definition of staleness is hardwired; this + should become configurable. + """ + + rpki.log.debug("Checking for failed publication for %r" % self) + + stale = rpki.sundial.now() - rpki.sundial.timedelta(seconds = 60) + repository = self.ca.parent.repository + + if self.latest_crl is not None and \ + self.crl_published is not None and \ + self.crl_published < stale: + rpki.log.debug("Retrying publication for %s" % self.crl_uri) + publisher.publish(cls = rpki.publication.crl_elt, + uri = self.crl_uri, + obj = self.latest_crl, + repository = repository, + handler = self.crl_published_callback) + + if self.latest_manifest is not None and \ + self.manifest_published is not None and \ + self.manifest_published < stale: + rpki.log.debug("Retrying publication for %s" % self.manifest_uri) + publisher.publish(cls = rpki.publication.manifest_elt, + uri = self.manifest_uri, + obj = self.latest_manifest, + repository = repository, + handler = self.manifest_published_callback) + + if not check_all: + return + + # Might also be able to return here if manifest and CRL are up to + # date, but let's avoid premature optimization + + for child_cert in self.unpublished_child_certs(stale): + rpki.log.debug("Retrying publication for %s" % child_cert) + publisher.publish( + cls = rpki.publication.certificate_elt, + uri = child_cert.uri, + obj = child_cert.cert, + repository = repository, + handler = child_cert.published_callback) + + for roa in self.unpublished_roas(stale): + rpki.log.debug("Retrying publication for %s" % roa) + publisher.publish( + cls = rpki.publication.roa_elt, + uri = roa.uri, + obj = roa.roa, + repository = repository, + handler = roa.published_callback) + + for ghostbuster in self.unpublished_ghostbusters(stale): + rpki.log.debug("Retrying publication for %s" % ghostbuster) + publisher.publish( + cls = rpki.publication.ghostbuster_elt, + uri = ghostbuster.uri, + 
obj = ghostbuster.ghostbuster, + repository = repository, + handler = ghostbuster.published_callback) + +class child_cert_obj(rpki.sql.sql_persistent): + """ + Certificate that has been issued to a child. + """ + + sql_template = rpki.sql.template( + "child_cert", + "child_cert_id", + ("cert", rpki.x509.X509), + "child_id", + "ca_detail_id", + "ski", + ("published", rpki.sundial.datetime)) + + def __repr__(self): + args = [self] + try: + args.append(self.uri) + except: + pass + return rpki.log.log_repr(*args) + + def __init__(self, gctx = None, child_id = None, ca_detail_id = None, cert = None): + """ + Initialize a child_cert_obj. + """ + rpki.sql.sql_persistent.__init__(self) + self.gctx = gctx + self.child_id = child_id + self.ca_detail_id = ca_detail_id + self.cert = cert + self.published = None + if child_id or ca_detail_id or cert: + self.sql_mark_dirty() + + @property + @rpki.sql.cache_reference + def child(self): + """ + Fetch child object to which this child_cert object links. + """ + return rpki.left_right.child_elt.sql_fetch(self.gctx, self.child_id) + + @property + @rpki.sql.cache_reference + def ca_detail(self): + """ + Fetch ca_detail object to which this child_cert object links. + """ + return ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) + + @ca_detail.deleter + def ca_detail(self): + try: + del self._ca_detail + except AttributeError: + pass + + @property + def uri_tail(self): + """ + Return the tail (filename) portion of the URI for this child_cert. + """ + return self.cert.gSKI() + ".cer" + + @property + def uri(self): + """ + Return the publication URI for this child_cert. + """ + return self.ca_detail.ca.sia_uri + self.uri_tail + + def revoke(self, publisher, generate_crl_and_manifest = True): + """ + Revoke a child cert. + """ + + ca_detail = self.ca_detail + ca = ca_detail.ca + rpki.log.debug("Revoking %r %r" % (self, self.uri)) + revoked_cert_obj.revoke(cert = self.cert, ca_detail = ca_detail) + publisher.withdraw( + cls = rpki.publication.certificate_elt, + uri = self.uri, + obj = self.cert, + repository = ca.parent.repository) + self.gctx.sql.sweep() + self.sql_delete() + if generate_crl_and_manifest: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + + def reissue(self, ca_detail, publisher, resources = None, sia = None, force = False): + """ + Reissue an existing child cert, reusing the public key. If the + child cert we would generate is identical to the one we already + have, we just return the one we already have. If we have to + revoke the old child cert when generating the new one, we have to + generate a new child_cert_obj, so calling code that needs the + updated child_cert_obj must use the return value from this method. 
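+
+ A minimal calling sketch (new_detail and publisher are assumed to
+ exist in the caller); note the rebinding, since the returned object
+ may be a brand new child_cert_obj rather than the one the method
+ was called on:
+
+   child_cert = child_cert.reissue(ca_detail = new_detail,
+                                   publisher = publisher)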
+ """ + + ca = ca_detail.ca + child = self.child + + old_resources = self.cert.get_3779resources() + old_sia = self.cert.get_SIA() + old_aia = self.cert.get_AIA()[0] + old_ca_detail = self.ca_detail + + needed = False + + if resources is None: + resources = old_resources + + if sia is None: + sia = old_sia + + assert resources.valid_until is not None and old_resources.valid_until is not None + + if resources.asn != old_resources.asn or resources.v4 != old_resources.v4 or resources.v6 != old_resources.v6: + rpki.log.debug("Resources changed for %r: old %s new %s" % (self, old_resources, resources)) + needed = True + + if resources.valid_until != old_resources.valid_until: + rpki.log.debug("Validity changed for %r: old %s new %s" % ( + self, old_resources.valid_until, resources.valid_until)) + needed = True + + if sia != old_sia: + rpki.log.debug("SIA changed for %r: old %r new %r" % (self, old_sia, sia)) + needed = True + + if ca_detail != old_ca_detail: + rpki.log.debug("Issuer changed for %r: old %r new %r" % (self, old_ca_detail, ca_detail)) + needed = True + + if ca_detail.ca_cert_uri != old_aia: + rpki.log.debug("AIA changed for %r: old %r new %r" % (self, old_aia, ca_detail.ca_cert_uri)) + needed = True + + must_revoke = old_resources.oversized(resources) or old_resources.valid_until > resources.valid_until + if must_revoke: + rpki.log.debug("Must revoke any existing cert(s) for %r" % self) + needed = True + + if not needed and force: + rpki.log.debug("No change needed for %r, forcing reissuance anyway" % self) + needed = True + + if not needed: + rpki.log.debug("No change to %r" % self) + return self + + if must_revoke: + for x in child.fetch_child_certs(ca_detail = ca_detail, ski = self.ski): + rpki.log.debug("Revoking child_cert %r" % x) + x.revoke(publisher = publisher) + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + + child_cert = ca_detail.issue( + ca = ca, + child = child, + subject_key = self.cert.getPublicKey(), + sia = sia, + resources = resources, + child_cert = None if must_revoke else self, + publisher = publisher) + + rpki.log.debug("New child_cert %r uri %s" % (child_cert, child_cert.uri)) + + return child_cert + + @classmethod + def fetch(cls, gctx = None, child = None, ca_detail = None, ski = None, unique = False, unpublished = None): + """ + Fetch all child_cert objects matching a particular set of + parameters. This is a wrapper to consolidate various queries that + would otherwise be inline SQL WHERE expressions. In most cases + code calls this indirectly, through methods in other classes. + """ + + args = [] + where = [] + + if child: + where.append("child_id = %s") + args.append(child.child_id) + + if ca_detail: + where.append("ca_detail_id = %s") + args.append(ca_detail.ca_detail_id) + + if ski: + where.append("ski = %s") + args.append(ski) + + if unpublished is not None: + where.append("published IS NOT NULL AND published < %s") + args.append(unpublished) + + where = " AND ".join(where) + + gctx = gctx or (child and child.gctx) or (ca_detail and ca_detail.gctx) or None + + if unique: + return cls.sql_fetch_where1(gctx, where, args) + else: + return cls.sql_fetch_where(gctx, where, args) + + def published_callback(self, pdu): + """ + Publication callback: check result and mark published. + """ + pdu.raise_if_error() + self.published = None + self.sql_mark_dirty() + +class revoked_cert_obj(rpki.sql.sql_persistent): + """ + Tombstone for a revoked certificate. 
+ """ + + sql_template = rpki.sql.template( + "revoked_cert", + "revoked_cert_id", + "serial", + "ca_detail_id", + ("revoked", rpki.sundial.datetime), + ("expires", rpki.sundial.datetime)) + + def __repr__(self): + return rpki.log.log_repr(self, repr(self.ca_detail), self.serial, self.revoked) + + def __init__(self, gctx = None, serial = None, revoked = None, expires = None, ca_detail_id = None): + """ + Initialize a revoked_cert_obj. + """ + rpki.sql.sql_persistent.__init__(self) + self.gctx = gctx + self.serial = serial + self.revoked = revoked + self.expires = expires + self.ca_detail_id = ca_detail_id + if serial or revoked or expires or ca_detail_id: + self.sql_mark_dirty() + + @property + @rpki.sql.cache_reference + def ca_detail(self): + """ + Fetch ca_detail object to which this revoked_cert_obj links. + """ + return ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) + + @classmethod + def revoke(cls, cert, ca_detail): + """ + Revoke a certificate. + """ + return cls( + serial = cert.getSerial(), + expires = cert.getNotAfter(), + revoked = rpki.sundial.now(), + gctx = ca_detail.gctx, + ca_detail_id = ca_detail.ca_detail_id) + +class roa_obj(rpki.sql.sql_persistent): + """ + Route Origin Authorization. + """ + + sql_template = rpki.sql.template( + "roa", + "roa_id", + "ca_detail_id", + "self_id", + "asn", + ("roa", rpki.x509.ROA), + ("cert", rpki.x509.X509), + ("published", rpki.sundial.datetime)) + + ca_detail_id = None + cert = None + roa = None + published = None + + @property + @rpki.sql.cache_reference + def self(self): + """ + Fetch self object to which this roa_obj links. + """ + return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id) + + @property + @rpki.sql.cache_reference + def ca_detail(self): + """ + Fetch ca_detail object to which this roa_obj links. + """ + return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) + + @ca_detail.deleter + def ca_detail(self): + try: + del self._ca_detail + except AttributeError: + pass + + def sql_fetch_hook(self): + """ + Extra SQL fetch actions for roa_obj -- handle prefix lists. + """ + for version, datatype, attribute in ((4, rpki.resource_set.roa_prefix_set_ipv4, "ipv4"), + (6, rpki.resource_set.roa_prefix_set_ipv6, "ipv6")): + setattr(self, attribute, datatype.from_sql( + self.gctx.sql, + """ + SELECT prefix, prefixlen, max_prefixlen FROM roa_prefix + WHERE roa_id = %s AND version = %s + """, + (self.roa_id, version))) + + def sql_insert_hook(self): + """ + Extra SQL insert actions for roa_obj -- handle prefix lists. + """ + for version, prefix_set in ((4, self.ipv4), (6, self.ipv6)): + if prefix_set: + self.gctx.sql.executemany( + """ + INSERT roa_prefix (roa_id, prefix, prefixlen, max_prefixlen, version) + VALUES (%s, %s, %s, %s, %s) + """, + ((self.roa_id, x.prefix, x.prefixlen, x.max_prefixlen, version) + for x in prefix_set)) + + def sql_delete_hook(self): + """ + Extra SQL delete actions for roa_obj -- handle prefix lists. 
+ """ + self.gctx.sql.execute("DELETE FROM roa_prefix WHERE roa_id = %s", (self.roa_id,)) + + def __repr__(self): + args = [self, self.asn, self.ipv4, self.ipv6] + try: + args.append(self.uri) + except: + pass + return rpki.log.log_repr(*args) + + def __init__(self, gctx = None, self_id = None, asn = None, ipv4 = None, ipv6 = None): + rpki.sql.sql_persistent.__init__(self) + self.gctx = gctx + self.self_id = self_id + self.asn = asn + self.ipv4 = ipv4 + self.ipv6 = ipv6 + + # Defer marking new ROA as dirty until .generate() has a chance to + # finish setup, otherwise we get SQL consistency errors. + # + #if self_id or asn or ipv4 or ipv6: self.sql_mark_dirty() + + def update(self, publisher, fast = False): + """ + Bring this roa_obj's ROA up to date if necesssary. + """ + + v4 = self.ipv4.to_resource_set() if self.ipv4 is not None else rpki.resource_set.resource_set_ipv4() + v6 = self.ipv6.to_resource_set() if self.ipv6 is not None else rpki.resource_set.resource_set_ipv6() + + if self.roa is None: + rpki.log.debug("%r doesn't exist, generating" % self) + return self.generate(publisher = publisher, fast = fast) + + ca_detail = self.ca_detail + + if ca_detail is None: + rpki.log.debug("%r has no associated ca_detail, generating" % self) + return self.generate(publisher = publisher, fast = fast) + + if ca_detail.state != "active": + rpki.log.debug("ca_detail associated with %r not active (state %s), regenerating" % (self, ca_detail.state)) + return self.regenerate(publisher = publisher, fast = fast) + + regen_time = self.cert.getNotAfter() - rpki.sundial.timedelta(seconds = self.self.regen_margin) + + if rpki.sundial.now() > regen_time: + rpki.log.debug("%r past threshold %s, regenerating" % (self, regen_time)) + return self.regenerate(publisher = publisher, fast = fast) + + ca_resources = ca_detail.latest_ca_cert.get_3779resources() + ee_resources = self.cert.get_3779resources() + + if ee_resources.oversized(ca_resources): + rpki.log.debug("%r oversized with respect to CA, regenerating" % self) + return self.regenerate(publisher = publisher, fast = fast) + + if ee_resources.v4 != v4 or ee_resources.v6 != v6: + rpki.log.debug("%r resources do not match EE, regenerating" % self) + return self.regenerate(publisher = publisher, fast = fast) + + if self.cert.get_AIA()[0] != ca_detail.ca_cert_uri: + rpki.log.debug("%r AIA changed, regenerating" % self) + return self.regenerate(publisher = publisher, fast = fast) + + def generate(self, publisher, fast = False): + """ + Generate a ROA. + + At present we have no way of performing a direct lookup from a + desired set of resources to a covering certificate, so we have to + search. This could be quite slow if we have a lot of active + ca_detail objects. Punt on the issue for now, revisit if + profiling shows this as a hotspot. + + Once we have the right covering certificate, we generate the ROA + payload, generate a new EE certificate, use the EE certificate to + sign the ROA payload, publish the result, then throw away the + private key for the EE cert, all per the ROA specification. This + implies that generating a lot of ROAs will tend to thrash + /dev/random, but there is not much we can do about that. + + If fast is set, we leave generating the new manifest for our + caller to handle, presumably at the end of a bulk operation. 
+ """ + + if self.ipv4 is None and self.ipv6 is None: + raise rpki.exceptions.EmptyROAPrefixList + + # Ugly and expensive search for covering ca_detail, there has to + # be a better way, but it would require the ability to test for + # resource subsets in SQL. + + v4 = self.ipv4.to_resource_set() if self.ipv4 is not None else rpki.resource_set.resource_set_ipv4() + v6 = self.ipv6.to_resource_set() if self.ipv6 is not None else rpki.resource_set.resource_set_ipv6() + + ca_detail = self.ca_detail + if ca_detail is None or ca_detail.state != "active" or ca_detail.has_expired(): + rpki.log.debug("Searching for new ca_detail for ROA %r" % self) + ca_detail = None + for parent in self.self.parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + assert ca_detail is None or ca_detail.state == "active" + if ca_detail is not None and not ca_detail.has_expired(): + resources = ca_detail.latest_ca_cert.get_3779resources() + if v4.issubset(resources.v4) and v6.issubset(resources.v6): + break + ca_detail = None + if ca_detail is not None: + break + else: + rpki.log.debug("Keeping old ca_detail for ROA %r" % self) + + if ca_detail is None: + raise rpki.exceptions.NoCoveringCertForROA, "Could not find a certificate covering %r" % self + + rpki.log.debug("Using new ca_detail %r for ROA %r, ca_detail_state %s" % ( + ca_detail, self, ca_detail.state)) + + ca = ca_detail.ca + resources = rpki.resource_set.resource_bag(v4 = v4, v6 = v6) + keypair = rpki.x509.RSA.generate() + + del self.ca_detail + self.ca_detail_id = ca_detail.ca_detail_id + self.cert = ca_detail.issue_ee( + ca = ca, + resources = resources, + subject_key = keypair.get_public(), + sia = (None, None, self.uri_from_key(keypair))) + self.roa = rpki.x509.ROA.build(self.asn, self.ipv4, self.ipv6, keypair, (self.cert,)) + self.published = rpki.sundial.now() + self.sql_store() + + rpki.log.debug("Generating %r URI %s" % (self, self.uri)) + publisher.publish( + cls = rpki.publication.roa_elt, + uri = self.uri, + obj = self.roa, + repository = ca.parent.repository, + handler = self.published_callback) + if not fast: + ca_detail.generate_manifest(publisher = publisher) + + + def published_callback(self, pdu): + """ + Check publication result. + """ + pdu.raise_if_error() + self.published = None + self.sql_mark_dirty() + + def revoke(self, publisher, regenerate = False, allow_failure = False, fast = False): + """ + Withdraw ROA associated with this roa_obj. + + In order to preserve make-before-break properties without + duplicating code, this method also handles generating a + replacement ROA when requested. + + If allow_failure is set, failing to withdraw the ROA will not be + considered an error. + + If fast is set, SQL actions will be deferred, on the assumption + that our caller will handle regenerating CRL and manifest and + flushing the SQL cache. 
+ """ + + ca_detail = self.ca_detail + cert = self.cert + roa = self.roa + uri = self.uri + + rpki.log.debug("%s %r, ca_detail %r state is %s" % ( + "Regenerating" if regenerate else "Not regenerating", + self, ca_detail, ca_detail.state)) + + if regenerate: + self.generate(publisher = publisher, fast = fast) + + rpki.log.debug("Withdrawing %r %s and revoking its EE cert" % (self, uri)) + rpki.rpkid.revoked_cert_obj.revoke(cert = cert, ca_detail = ca_detail) + publisher.withdraw(cls = rpki.publication.roa_elt, uri = uri, obj = roa, + repository = ca_detail.ca.parent.repository, + handler = False if allow_failure else None) + + if not regenerate: + self.sql_mark_deleted() + + if not fast: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + self.gctx.sql.sweep() + + def regenerate(self, publisher, fast = False): + """ + Reissue ROA associated with this roa_obj. + """ + if self.ca_detail is None: + self.generate(publisher = publisher, fast = fast) + else: + self.revoke(publisher = publisher, regenerate = True, fast = fast) + + def uri_from_key(self, key): + """ + Return publication URI for a public key. + """ + return self.ca_detail.ca.sia_uri + key.gSKI() + ".roa" + + @property + def uri(self): + """ + Return the publication URI for this roa_obj's ROA. + """ + return self.ca_detail.ca.sia_uri + self.uri_tail + + @property + def uri_tail(self): + """ + Return the tail (filename portion) of the publication URI for this + roa_obj's ROA. + """ + return self.cert.gSKI() + ".roa" + + +class ghostbuster_obj(rpki.sql.sql_persistent): + """ + Ghostbusters record. + """ + + sql_template = rpki.sql.template( + "ghostbuster", + "ghostbuster_id", + "ca_detail_id", + "self_id", + "vcard", + ("ghostbuster", rpki.x509.Ghostbuster), + ("cert", rpki.x509.X509), + ("published", rpki.sundial.datetime)) + + ca_detail_id = None + cert = None + ghostbuster = None + published = None + vcard = None + + def __repr__(self): + args = [self] + try: + args.extend(self.vcard.splitlines()[2:-1]) + except: + pass + try: + args.append(self.uri) + except: + pass + return rpki.log.log_repr(*args) + + @property + @rpki.sql.cache_reference + def self(self): + """ + Fetch self object to which this ghostbuster_obj links. + """ + return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id) + + @property + @rpki.sql.cache_reference + def ca_detail(self): + """ + Fetch ca_detail object to which this ghostbuster_obj links. + """ + return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) + + def __init__(self, gctx = None, self_id = None, ca_detail_id = None, vcard = None): + rpki.sql.sql_persistent.__init__(self) + self.gctx = gctx + self.self_id = self_id + self.ca_detail_id = ca_detail_id + self.vcard = vcard + + # Defer marking new ghostbuster as dirty until .generate() has a chance to + # finish setup, otherwise we get SQL consistency errors. + + def update(self, publisher, fast = False): + """ + Bring this ghostbuster_obj up to date if necesssary. 
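For context on the vcard column used by this class: a Ghostbusters record payload is a vCard (RFC 6493, profiling vCard 4.0 from RFC 6350). A minimal, entirely hypothetical payload of the sort the IRDB might hand back looks roughly like this; the contact details are invented:

vcard = "\r\n".join((
    "BEGIN:VCARD",
    "VERSION:4.0",
    "FN:Example Network Operations",   # FN is the one mandatory property
    "EMAIL:noc@example.net",
    "TEL:+1-555-0100",
    "END:VCARD",
    ""))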
+ """ + + if self.ghostbuster is None: + rpki.log.debug("Ghostbuster record doesn't exist, generating") + return self.generate(publisher = publisher, fast = fast) + + regen_time = self.cert.getNotAfter() - rpki.sundial.timedelta(seconds = self.self.regen_margin) + + if rpki.sundial.now() > regen_time: + rpki.log.debug("%r past threshold %s, regenerating" % (self, regen_time)) + return self.regenerate(publisher = publisher, fast = fast) + + if self.cert.get_AIA()[0] != self.ca_detail.ca_cert_uri: + rpki.log.debug("%r AIA changed, regenerating" % self) + return self.regenerate(publisher = publisher, fast = fast) + + def generate(self, publisher, fast = False): + """ + Generate a Ghostbuster record + + Once we have the right covering certificate, we generate the + ghostbuster payload, generate a new EE certificate, use the EE + certificate to sign the ghostbuster payload, publish the result, + then throw away the private key for the EE cert. This is modeled + after the way we handle ROAs. + + If fast is set, we leave generating the new manifest for our + caller to handle, presumably at the end of a bulk operation. + """ + + ca_detail = self.ca_detail + ca = ca_detail.ca + + resources = rpki.resource_set.resource_bag.from_inheritance() + keypair = rpki.x509.RSA.generate() + + self.cert = ca_detail.issue_ee( + ca = ca, + resources = resources, + subject_key = keypair.get_public(), + sia = (None, None, self.uri_from_key(keypair))) + self.ghostbuster = rpki.x509.Ghostbuster.build(self.vcard, keypair, (self.cert,)) + self.published = rpki.sundial.now() + self.sql_store() + + rpki.log.debug("Generating Ghostbuster record %r" % self.uri) + publisher.publish( + cls = rpki.publication.ghostbuster_elt, + uri = self.uri, + obj = self.ghostbuster, + repository = ca.parent.repository, + handler = self.published_callback) + if not fast: + ca_detail.generate_manifest(publisher = publisher) + + def published_callback(self, pdu): + """ + Check publication result. + """ + pdu.raise_if_error() + self.published = None + self.sql_mark_dirty() + + def revoke(self, publisher, regenerate = False, allow_failure = False, fast = False): + """ + Withdraw Ghostbuster associated with this ghostbuster_obj. + + In order to preserve make-before-break properties without + duplicating code, this method also handles generating a + replacement ghostbuster when requested. + + If allow_failure is set, failing to withdraw the ghostbuster will not be + considered an error. + + If fast is set, SQL actions will be deferred, on the assumption + that our caller will handle regenerating CRL and manifest and + flushing the SQL cache. 
+ """ + + ca_detail = self.ca_detail + cert = self.cert + ghostbuster = self.ghostbuster + uri = self.uri + + rpki.log.debug("%s %r, ca_detail %r state is %s" % ( + "Regenerating" if regenerate else "Not regenerating", + self, ca_detail, ca_detail.state)) + + if regenerate: + self.generate(publisher = publisher, fast = fast) + + rpki.log.debug("Withdrawing %r %s and revoking its EE cert" % (self, uri)) + rpki.rpkid.revoked_cert_obj.revoke(cert = cert, ca_detail = ca_detail) + publisher.withdraw(cls = rpki.publication.ghostbuster_elt, uri = uri, obj = ghostbuster, + repository = ca_detail.ca.parent.repository, + handler = False if allow_failure else None) + + if not regenerate: + self.sql_mark_deleted() + + if not fast: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + self.gctx.sql.sweep() + + def regenerate(self, publisher, fast = False): + """ + Reissue Ghostbuster associated with this ghostbuster_obj. + """ + if self.ghostbuster is None: + self.generate(publisher = publisher, fast = fast) + else: + self.revoke(publisher = publisher, regenerate = True, fast = fast) + + def uri_from_key(self, key): + """ + Return publication URI for a public key. + """ + return self.ca_detail.ca.sia_uri + key.gSKI() + ".gbr" + + @property + def uri(self): + """ + Return the publication URI for this ghostbuster_obj's ghostbuster. + """ + return self.ca_detail.ca.sia_uri + self.uri_tail + + @property + def uri_tail(self): + """ + Return the tail (filename portion) of the publication URI for this + ghostbuster_obj's ghostbuster. + """ + return self.cert.gSKI() + ".gbr" + + +class ee_cert_obj(rpki.sql.sql_persistent): + """ + EE certificate (router certificate or generic). + """ + + sql_template = rpki.sql.template( + "ee_cert", + "ee_cert_id", + "self_id", + "ca_detail_id", + "ski", + ("cert", rpki.x509.X509), + ("published", rpki.sundial.datetime)) + + def __repr__(self): + return rpki.log.log_repr(self, self.cert.getSubject(), self.uri) + + def __init__(self, gctx = None, self_id = None, ca_detail_id = None, cert = None): + rpki.sql.sql_persistent.__init__(self) + self.gctx = gctx + self.self_id = self_id + self.ca_detail_id = ca_detail_id + self.cert = cert + self.ski = None if cert is None else cert.get_SKI() + self.published = None + if self_id or ca_detail_id or cert: + self.sql_mark_dirty() + + @property + @rpki.sql.cache_reference + def self(self): + """ + Fetch self object to which this ee_cert_obj links. + """ + return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id) + + @property + @rpki.sql.cache_reference + def ca_detail(self): + """ + Fetch ca_detail object to which this ee_cert_obj links. + """ + return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) + + @ca_detail.deleter + def ca_detail(self): + try: + del self._ca_detail + except AttributeError: + pass + + @property + def gski(self): + """ + Calculate g(SKI), for ease of comparison with XML. + + Although, really, one has to ask why we don't just store g(SKI) + in rpkid.sql instead of ski.... + """ + return base64.urlsafe_b64encode(self.ski).rstrip("=") + + @gski.setter + def gski(self, val): + self.ski = base64.urlsafe_b64decode(s + ("=" * ((4 - len(s)) % 4))) + + @property + def uri(self): + """ + Return the publication URI for this ee_cert_obj. + """ + return self.ca_detail.ca.sia_uri + self.uri_tail + + @property + def uri_tail(self): + """ + Return the tail (filename portion) of the publication URI for this + ee_cert_obj. 
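The gski property and setter above are a URL-safe base64 encoding of the raw 20-byte SKI, with the trailing "=" padding stripped for XML and restored on the way back in (as written the setter refers to an undefined name s where it presumably means its val argument). A standalone sketch of the same round trip:

import base64

def ski_to_gski(ski):
    # URL-safe base64 with the "=" padding stripped, as carried in XML.
    return base64.urlsafe_b64encode(ski).rstrip("=")

def gski_to_ski(gski):
    # Pad back out to a multiple of four characters before decoding.
    return base64.urlsafe_b64decode(gski + ("=" * ((4 - len(gski)) % 4)))

ski = "\x01\x23\x45\x67\x89" * 4            # stand-in for a real 20-byte SKI
assert gski_to_ski(ski_to_gski(ski)) == ski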
+ """ + return self.cert.gSKI() + ".cer" + + @classmethod + def create(cls, ca_detail, subject_name, subject_key, resources, publisher, eku = None): + """ + Generate a new certificate and stuff it in a new ee_cert_obj. + """ + + cn, sn = subject_name.extract_cn_and_sn() + ca = ca_detail.ca + + cert = ca_detail.issue_ee( + ca = ca, + subject_key = subject_key, + sia = None, + resources = resources, + notAfter = resources.valid_until, + cn = cn, + sn = sn, + eku = eku) + + self = cls( + gctx = ca_detail.gctx, + self_id = ca.parent.self.self_id, + ca_detail_id = ca_detail.ca_detail_id, + cert = cert) + + publisher.publish( + cls = rpki.publication.certificate_elt, + uri = self.uri, + obj = self.cert, + repository = ca.parent.repository, + handler = self.published_callback) + + self.sql_store() + + ca_detail.generate_manifest(publisher = publisher) + + rpki.log.debug("New ee_cert %r" % self) + + return self + + def revoke(self, publisher, generate_crl_and_manifest = True): + """ + Revoke and withdraw an EE certificate. + """ + + ca_detail = self.ca_detail + ca = ca_detail.ca + rpki.log.debug("Revoking %r %r" % (self, self.uri)) + revoked_cert_obj.revoke(cert = self.cert, ca_detail = ca_detail) + publisher.withdraw(cls = rpki.publication.certificate_elt, + uri = self.uri, + obj = self.cert, + repository = ca.parent.repository) + self.gctx.sql.sweep() + self.sql_delete() + if generate_crl_and_manifest: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + + def reissue(self, publisher, ca_detail = None, resources = None, force = False): + """ + Reissue an existing EE cert, reusing the public key. If the EE + cert we would generate is identical to the one we already have, we + just return; if we need to reissue, we reuse this ee_cert_obj and + just update its contents, as the publication URI will not have + changed. 
+ """ + + needed = False + + old_cert = self.cert + + old_ca_detail = self.ca_detail + if ca_detail is None: + ca_detail = old_ca_detail + + assert ca_detail.ca is old_ca_detail.ca + + old_resources = old_cert.get_3779resources() + if resources is None: + resources = old_resources + + assert resources.valid_until is not None and old_resources.valid_until is not None + + assert ca_detail.covers(resources) + + if ca_detail != self.ca_detail: + rpki.log.debug("ca_detail changed for %r: old %r new %r" % ( + self, self.ca_detail, ca_detail)) + needed = True + + if ca_detail.ca_cert_uri != old_cert.get_AIA()[0]: + rpki.log.debug("AIA changed for %r: old %s new %s" % ( + self, old_cert.get_AIA()[0], ca_detail.ca_cert_uri)) + needed = True + + if resources.valid_until != old_resources.valid_until: + rpki.log.debug("Validity changed for %r: old %s new %s" % ( + self, old_resources.valid_until, resources.valid_until)) + needed = True + + if resources.asn != old_resources.asn or resources.v4 != old_resources.v4 or resources.v6 != old_resources.v6: + rpki.log.debug("Resources changed for %r: old %s new %s" % ( + self, old_resources, resources)) + needed = True + + must_revoke = (old_resources.oversized(resources) or + old_resources.valid_until > resources.valid_until) + if must_revoke: + rpki.log.debug("Must revoke existing cert(s) for %r" % self) + needed = True + + if not needed and force: + rpki.log.debug("No change needed for %r, forcing reissuance anyway" % self) + needed = True + + if not needed: + rpki.log.debug("No change to %r" % self) + return + + cn, sn = self.cert.getSubject().extract_cn_and_sn() + + self.cert = ca_detail.issue_ee( + ca = ca_detail.ca, + subject_key = self.cert.getPublicKey(), + eku = self.cert.get_EKU(), + sia = None, + resources = resources, + notAfter = resources.valid_until, + cn = cn, + sn = sn) + + self.sql_mark_dirty() + + publisher.publish( + cls = rpki.publication.certificate_elt, + uri = self.uri, + obj = self.cert, + repository = ca_detail.ca.parent.repository, + handler = self.published_callback) + + if must_revoke: + revoked_cert_obj.revoke(cert = old_cert.cert, ca_detail = old_ca_detail) + + self.gctx.sql.sweep() + + if must_revoke: + ca_detail.generate_crl(publisher = publisher) + self.gctx.sql.sweep() + + ca_detail.generate_manifest(publisher = publisher) + + def published_callback(self, pdu): + """ + Publication callback: check result and mark published. + """ + pdu.raise_if_error() + self.published = None + self.sql_mark_dirty() + + +class publication_queue(object): + """ + Utility to simplify publication from within rpkid. + + General idea here is to accumulate a collection of objects to be + published, in one or more repositories, each potentially with its + own completion callback. Eventually we want to publish everything + we've accumulated, at which point we need to iterate over the + collection and do repository.call_pubd() for each repository. 
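A stripped-down, stdlib-only sketch of the accumulate-then-flush pattern this class implements; real publication PDUs, repository objects, and pubd calls are replaced by dicts and prints, and the per-URI replacement mirrors the replace/uris bookkeeping below:

class toy_publication_queue(object):
    # Accumulate (uri, obj) pairs per repository, keeping only the newest
    # object for any given URI, then flush one batch per repository.

    def __init__(self):
        self.msgs = {}                      # repository -> {uri: obj}

    def publish(self, repository, uri, obj):
        self.msgs.setdefault(repository, {})[uri] = obj

    def call_pubd(self):
        for repository, batch in self.msgs.items():
            print("publishing %d object(s) to %s" % (len(batch), repository))
        self.msgs.clear()

q = toy_publication_queue()
q.publish("repo-A", "rsync://repo-A/x.roa", "roa-1")
q.publish("repo-A", "rsync://repo-A/x.roa", "roa-2")   # replaces roa-1
q.publish("repo-B", "rsync://repo-B/y.cer", "cert")
q.call_pubd()                                           # one batch per repository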
+ """ + + replace = True + + def __init__(self): + self.clear() + + def clear(self): + self.repositories = {} + self.msgs = {} + self.handlers = {} + if self.replace: + self.uris = {} + + def _add(self, uri, obj, repository, handler, make_pdu): + rid = id(repository) + if rid not in self.repositories: + self.repositories[rid] = repository + self.msgs[rid] = rpki.publication.msg.query() + if self.replace and uri in self.uris: + rpki.log.debug("Removing publication duplicate <%s %r %r>" % (self.uris[uri].action, self.uris[uri].uri, self.uris[uri].payload)) + self.msgs[rid].remove(self.uris.pop(uri)) + pdu = make_pdu(uri = uri, obj = obj) + if handler is not None: + self.handlers[id(pdu)] = handler + pdu.tag = id(pdu) + self.msgs[rid].append(pdu) + if self.replace: + self.uris[uri] = pdu + + def publish(self, cls, uri, obj, repository, handler = None): + return self._add( uri, obj, repository, handler, cls.make_publish) + + def withdraw(self, cls, uri, obj, repository, handler = None): + return self._add( uri, obj, repository, handler, cls.make_withdraw) + + def call_pubd(self, cb, eb): + def loop(iterator, rid): + rpki.log.debug("Calling pubd[%r]" % self.repositories[rid]) + self.repositories[rid].call_pubd(iterator, eb, self.msgs[rid], self.handlers) + def done(): + self.clear() + cb() + rpki.async.iterator(self.repositories, loop, done) + + @property + def size(self): + return sum(len(self.msgs[rid]) for rid in self.repositories) + + def empty(self): + assert (not self.msgs) == (self.size == 0) + return not self.msgs diff --git a/rpki/rpkid_tasks.py b/rpki/rpkid_tasks.py new file mode 100644 index 00000000..04e1c0df --- /dev/null +++ b/rpki/rpkid_tasks.py @@ -0,0 +1,750 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2012--2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +rpkid task objects. Split out from rpki.left_right and rpki.rpkid +because interactions with rpkid scheduler were getting too complicated. +""" + +import rpki.log +import rpki.rpkid +import rpki.async +import rpki.up_down +import rpki.sundial +import rpki.publication +import rpki.exceptions + +task_classes = () + +def queue_task(cls): + """ + Class decorator to add a new task class to task_classes. + """ + + global task_classes + task_classes += (cls,) + return cls + + +class CompletionHandler(object): + """ + Track one or more scheduled rpkid tasks and execute a callback when + the last of them terminates. + """ + + ## @var debug + # Debug logging. 
+ + debug = False + + def __init__(self, cb): + self.cb = cb + self.tasks = set() + + def register(self, task): + if self.debug: + rpki.log.debug("Completion handler %r registering task %r" % (self, task)) + self.tasks.add(task) + task.register_completion(self.done) + + def done(self, task): + try: + self.tasks.remove(task) + except KeyError: + rpki.log.warn("Completion handler %r called with unregistered task %r, blundering onwards" % (self, task)) + else: + if self.debug: + rpki.log.debug("Completion handler %r called with registered task %r" % (self, task)) + if not self.tasks: + if self.debug: + rpki.log.debug("Completion handler %r finished, calling %r" % (self, self.cb)) + self.cb() + + @property + def count(self): + return len(self.tasks) + + +class AbstractTask(object): + """ + Abstract base class for rpkid scheduler task objects. This just + handles the scheduler hooks, real work starts in self.start. + + NB: This assumes that the rpki.rpkid.rpkid.task_* methods have been + rewritten to expect instances of subclasses of this class, rather + than expecting thunks to be wrapped up in the older version of this + class. Rewrite, rewrite, remove this comment when done, OK! + """ + + ## @var timeslice + # How long before a task really should consider yielding the CPU to + # let something else run. + + timeslice = rpki.sundial.timedelta(seconds = 15) + + def __init__(self, s, description = None): + self.self = s + self.description = description + self.completions = [] + self.continuation = None + self.due_date = None + self.clear() + + def __repr__(self): + return rpki.log.log_repr(self, self.description) + + def register_completion(self, completion): + self.completions.append(completion) + + def exit(self): + while self.completions: + self.completions.pop(0)(self) + self.clear() + self.due_date = None + self.self.gctx.task_next() + + def postpone(self, continuation): + self.continuation = continuation + self.due_date = None + self.self.gctx.task_add(self) + self.self.gctx.task_next() + + def __call__(self): + self.due_date = rpki.sundial.now() + self.timeslice + if self.continuation is None: + rpki.log.debug("Running task %r" % self) + self.clear() + self.start() + else: + rpki.log.debug("Restarting task %r at %r" % (self, self.continuation)) + continuation = self.continuation + self.continuation = None + continuation() + + @property + def overdue(self): + return rpki.sundial.now() > self.due_date + + def __getattr__(self, name): + return getattr(self.self, name) + + def start(self): + raise NotImplementedError + + def clear(self): + pass + + +@queue_task +class PollParentTask(AbstractTask): + """ + Run the regular client poll cycle with each of this self's + parents, in turn. 
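CompletionHandler above is a simple fan-in: every registered task reports back as it exits, and only when the set of outstanding tasks empties does the handler fire its own callback. The same pattern in a self-contained sketch, with strings standing in for task objects:

class toy_completion_handler(object):

    def __init__(self, cb):
        self.cb = cb
        self.tasks = set()

    def register(self, task):
        self.tasks.add(task)

    def done(self, task):
        self.tasks.discard(task)
        if not self.tasks:
            self.cb()                       # last task finished

def all_done():
    print("all tasks complete, run the next cron stage")

handler = toy_completion_handler(all_done)
for name in ("poll-parents", "update-children", "update-roas"):
    handler.register(name)
for name in ("update-roas", "poll-parents", "update-children"):
    handler.done(name)                      # callback fires only after the last call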
+ """ + + def clear(self): + self.parent_iterator = None + self.parent = None + self.ca_map = None + self.class_iterator = None + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + rpki.log.debug("Self %s[%d] polling parents" % (self.self_handle, self.self_id)) + rpki.async.iterator(self.parents, self.parent_loop, self.exit) + + def parent_loop(self, parent_iterator, parent): + self.parent_iterator = parent_iterator + self.parent = parent + rpki.up_down.list_pdu.query(parent, self.got_list, self.list_failed) + + def got_list(self, r_msg): + self.ca_map = dict((ca.parent_resource_class, ca) for ca in self.parent.cas) + self.gctx.checkpoint() + rpki.async.iterator(r_msg.payload.classes, self.class_loop, self.class_done) + + def list_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't get resource class list from parent %r, skipping: %s (%r)" % ( + self.parent, e, e)) + self.parent_iterator() + + def class_loop(self, class_iterator, rc): + self.gctx.checkpoint() + self.class_iterator = class_iterator + try: + ca = self.ca_map.pop(rc.class_name) + except KeyError: + rpki.rpkid.ca_obj.create(self.parent, rc, class_iterator, self.class_create_failed) + else: + ca.check_for_updates(self.parent, rc, class_iterator, self.class_update_failed) + + def class_update_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't update class, skipping: %s" % e) + self.class_iterator() + + def class_create_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't create class, skipping: %s" % e) + self.class_iterator() + + def class_done(self): + rpki.async.iterator(self.ca_map.values(), self.ca_loop, self.ca_done) + + def ca_loop(self, iterator, ca): + self.gctx.checkpoint() + ca.delete(self.parent, iterator) + + def ca_done(self): + self.gctx.checkpoint() + self.gctx.sql.sweep() + self.parent_iterator() + + +@queue_task +class UpdateChildrenTask(AbstractTask): + """ + Check for updated IRDB data for all of this self's children and + issue new certs as necessary. Must handle changes both in + resources and in expiration date. 
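The poll cycle above is a reconciliation pass: build a map of the CAs we already have keyed by parent resource class, pop an entry for each class the parent reports (creating a new CA on a miss), and delete whatever is left over, since those classes are no longer offered. The same shape in miniature, with strings standing in for CA objects:

existing = {"class-1": "ca-1", "class-2": "ca-2", "class-3": "ca-3"}
reported = ["class-1", "class-3", "class-4"]       # parent's list response

for class_name in reported:
    ca = existing.pop(class_name, None)
    if ca is None:
        print("creating new CA for %s" % class_name)
    else:
        print("checking %s for updates" % ca)

for ca in existing.values():                       # classes the parent dropped
    print("deleting %s" % ca)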
+ """ + + def clear(self): + self.now = None + self.rsn = None + self.publisher = None + self.iterator = None + self.child = None + self.child_certs = None + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + rpki.log.debug("Self %s[%d] updating children" % (self.self_handle, self.self_id)) + self.now = rpki.sundial.now() + self.rsn = self.now + rpki.sundial.timedelta(seconds = self.regen_margin) + self.publisher = rpki.rpkid.publication_queue() + rpki.async.iterator(self.children, self.loop, self.done) + + def loop(self, iterator, child): + self.gctx.checkpoint() + self.gctx.sql.sweep() + self.iterator = iterator + self.child = child + self.child_certs = child.child_certs + if self.overdue: + self.publisher.call_pubd(lambda: self.postpone(self.do_child), self.publication_failed) + else: + self.do_child() + + def do_child(self): + if self.child_certs: + self.gctx.irdb_query_child_resources(self.child.self.self_handle, self.child.child_handle, + self.got_resources, self.lose) + else: + self.iterator() + + def lose(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't update child %r, skipping: %s" % (self.child, e)) + self.iterator() + + def got_resources(self, irdb_resources): + try: + for child_cert in self.child_certs: + ca_detail = child_cert.ca_detail + ca = ca_detail.ca + if ca_detail.state == "active": + old_resources = child_cert.cert.get_3779resources() + new_resources = old_resources & irdb_resources & ca_detail.latest_ca_cert.get_3779resources() + old_aia = child_cert.cert.get_AIA()[0] + new_aia = ca_detail.ca_cert_uri + + if new_resources.empty(): + rpki.log.debug("Resources shrank to the null set, " + "revoking and withdrawing child %s certificate SKI %s" % ( + self.child.child_handle, child_cert.cert.gSKI())) + child_cert.revoke(publisher = self.publisher) + ca_detail.generate_crl(publisher = self.publisher) + ca_detail.generate_manifest(publisher = self.publisher) + + elif (old_resources != new_resources or + old_aia != new_aia or + (old_resources.valid_until < self.rsn and + irdb_resources.valid_until > self.now and + old_resources.valid_until != irdb_resources.valid_until)): + + rpki.log.debug("Need to reissue child %s certificate SKI %s" % ( + self.child.child_handle, child_cert.cert.gSKI())) + if old_resources != new_resources: + rpki.log.debug("Child %s SKI %s resources changed: old %s new %s" % ( + self.child.child_handle, child_cert.cert.gSKI(), old_resources, new_resources)) + if old_resources.valid_until != irdb_resources.valid_until: + rpki.log.debug("Child %s SKI %s validity changed: old %s new %s" % ( + self.child.child_handle, child_cert.cert.gSKI(), + old_resources.valid_until, irdb_resources.valid_until)) + + new_resources.valid_until = irdb_resources.valid_until + child_cert.reissue( + ca_detail = ca_detail, + resources = new_resources, + publisher = self.publisher) + + elif old_resources.valid_until < self.now: + rpki.log.debug("Child %s certificate SKI %s has expired: cert.valid_until %s, irdb.valid_until %s" + % (self.child.child_handle, child_cert.cert.gSKI(), + old_resources.valid_until, irdb_resources.valid_until)) + child_cert.sql_delete() + self.publisher.withdraw( + cls = rpki.publication.certificate_elt, + uri = child_cert.uri, + obj = child_cert.cert, + repository = ca.parent.repository) + ca_detail.generate_manifest(publisher = self.publisher) + + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + self.gctx.checkpoint() + self.lose(e) + else: + self.gctx.checkpoint() + self.gctx.sql.sweep() + 
self.iterator() + + def done(self): + self.gctx.checkpoint() + self.gctx.sql.sweep() + self.publisher.call_pubd(self.exit, self.publication_failed) + + def publication_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish for %s, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() + + +@queue_task +class UpdateROAsTask(AbstractTask): + """ + Generate or update ROAs for this self. + """ + + def clear(self): + self.orphans = None + self.updates = None + self.publisher = None + self.ca_details = None + self.count = None + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + self.gctx.sql.sweep() + rpki.log.debug("Self %s[%d] updating ROAs" % (self.self_handle, self.self_id)) + + rpki.log.debug("Issuing query for ROA requests") + self.gctx.irdb_query_roa_requests(self.self_handle, self.got_roa_requests, self.roa_requests_failed) + + def got_roa_requests(self, roa_requests): + self.gctx.checkpoint() + rpki.log.debug("Received response to query for ROA requests") + + if self.gctx.sql.dirty: + rpki.log.warn("Unexpected dirty SQL cache, flushing") + self.gctx.sql.sweep() + + roas = {} + seen = set() + self.orphans = [] + self.updates = [] + self.publisher = rpki.rpkid.publication_queue() + self.ca_details = set() + + for roa in self.roas: + k = (roa.asn, str(roa.ipv4), str(roa.ipv6)) + if k not in roas: + roas[k] = roa + elif (roa.roa is not None and roa.cert is not None and roa.ca_detail is not None and roa.ca_detail.state == "active" and + (roas[k].roa is None or roas[k].cert is None or roas[k].ca_detail is None or roas[k].ca_detail.state != "active")): + self.orphans.append(roas[k]) + roas[k] = roa + else: + self.orphans.append(roa) + + for roa_request in roa_requests: + k = (roa_request.asn, str(roa_request.ipv4), str(roa_request.ipv6)) + if k in seen: + rpki.log.warn("Skipping duplicate ROA request %r" % roa_request) + else: + seen.add(k) + roa = roas.pop(k, None) + if roa is None: + roa = rpki.rpkid.roa_obj(self.gctx, self.self_id, roa_request.asn, roa_request.ipv4, roa_request.ipv6) + rpki.log.debug("Created new %r" % roa) + else: + rpki.log.debug("Found existing %r" % roa) + self.updates.append(roa) + + self.orphans.extend(roas.itervalues()) + + if self.overdue: + self.postpone(self.begin_loop) + else: + self.begin_loop() + + def begin_loop(self): + self.count = 0 + rpki.async.iterator(self.updates, self.loop, self.done, pop_list = True) + + def loop(self, iterator, roa): + self.gctx.checkpoint() + try: + roa.update(publisher = self.publisher, fast = True) + self.ca_details.add(roa.ca_detail) + self.gctx.sql.sweep() + except (SystemExit, rpki.async.ExitNow): + raise + except rpki.exceptions.NoCoveringCertForROA: + rpki.log.warn("No covering certificate for %r, skipping" % roa) + except Exception, e: + rpki.log.traceback() + rpki.log.warn("Could not update %r, skipping: %s" % (roa, e)) + self.count += 1 + if self.overdue: + self.publish(lambda: self.postpone(iterator)) + else: + iterator() + + def publish(self, done): + if not self.publisher.empty(): + for ca_detail in self.ca_details: + rpki.log.debug("Generating new CRL for %r" % ca_detail) + ca_detail.generate_crl(publisher = self.publisher) + rpki.log.debug("Generating new manifest for %r" % ca_detail) + ca_detail.generate_manifest(publisher = self.publisher) + self.ca_details.clear() + self.gctx.sql.sweep() + self.gctx.checkpoint() + self.publisher.call_pubd(done, self.publication_failed) + + def publication_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't 
publish for %s, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() + + def done(self): + for roa in self.orphans: + try: + self.ca_details.add(roa.ca_detail) + roa.revoke(publisher = self.publisher, fast = True) + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + rpki.log.traceback() + rpki.log.warn("Could not revoke %r: %s" % (roa, e)) + self.gctx.sql.sweep() + self.gctx.checkpoint() + self.publish(self.exit) + + def roa_requests_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Could not fetch ROA requests for %s, skipping: %s" % (self.self_handle, e)) + self.exit() + + +@queue_task +class UpdateGhostbustersTask(AbstractTask): + """ + Generate or update Ghostbuster records for this self. + + This was originally based on the ROA update code. It's possible + that both could benefit from refactoring, but at this point the + potential scaling issues for ROAs completely dominate structure of + the ROA code, and aren't relevant here unless someone is being + exceptionally silly. + """ + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + rpki.log.debug("Self %s[%d] updating Ghostbuster records" % (self.self_handle, self.self_id)) + + self.gctx.irdb_query_ghostbuster_requests(self.self_handle, + (p.parent_handle for p in self.parents), + self.got_ghostbuster_requests, + self.ghostbuster_requests_failed) + + def got_ghostbuster_requests(self, ghostbuster_requests): + + try: + self.gctx.checkpoint() + if self.gctx.sql.dirty: + rpki.log.warn("Unexpected dirty SQL cache, flushing") + self.gctx.sql.sweep() + + ghostbusters = {} + orphans = [] + publisher = rpki.rpkid.publication_queue() + ca_details = set() + seen = set() + + parents = dict((p.parent_handle, p) for p in self.parents) + + for ghostbuster in self.ghostbusters: + k = (ghostbuster.ca_detail_id, ghostbuster.vcard) + if ghostbuster.ca_detail.state != "active" or k in ghostbusters: + orphans.append(ghostbuster) + else: + ghostbusters[k] = ghostbuster + + for ghostbuster_request in ghostbuster_requests: + if ghostbuster_request.parent_handle not in parents: + rpki.log.warn("Unknown parent_handle %r in Ghostbuster request, skipping" % ghostbuster_request.parent_handle) + continue + k = (ghostbuster_request.parent_handle, ghostbuster_request.vcard) + if k in seen: + rpki.log.warn("Skipping duplicate Ghostbuster request %r" % ghostbuster_request) + continue + seen.add(k) + for ca in parents[ghostbuster_request.parent_handle].cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None: + ghostbuster = ghostbusters.pop((ca_detail.ca_detail_id, ghostbuster_request.vcard), None) + if ghostbuster is None: + ghostbuster = rpki.rpkid.ghostbuster_obj(self.gctx, self.self_id, ca_detail.ca_detail_id, ghostbuster_request.vcard) + rpki.log.debug("Created new %r for %r" % (ghostbuster, ghostbuster_request.parent_handle)) + else: + rpki.log.debug("Found existing %r for %s" % (ghostbuster, ghostbuster_request.parent_handle)) + ghostbuster.update(publisher = publisher, fast = True) + ca_details.add(ca_detail) + + orphans.extend(ghostbusters.itervalues()) + for ghostbuster in orphans: + ca_details.add(ghostbuster.ca_detail) + ghostbuster.revoke(publisher = publisher, fast = True) + + for ca_detail in ca_details: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + + self.gctx.sql.sweep() + + self.gctx.checkpoint() + publisher.call_pubd(self.exit, self.publication_failed) + + except (SystemExit, rpki.async.ExitNow): + raise + except 
Exception, e: + rpki.log.traceback() + rpki.log.warn("Could not update Ghostbuster records for %s, skipping: %s" % (self.self_handle, e)) + self.exit() + + def publication_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish Ghostbuster updates for %s, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() + + def ghostbuster_requests_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Could not fetch Ghostbuster record requests for %s, skipping: %s" % (self.self_handle, e)) + self.exit() + + +@queue_task +class UpdateEECertificatesTask(AbstractTask): + """ + Generate or update EE certificates for this self. + + Not yet sure what kind of scaling constraints this task might have, + so keeping it simple for initial version, we can optimize later. + """ + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + rpki.log.debug("Self %s[%d] updating EE certificates" % (self.self_handle, self.self_id)) + + self.gctx.irdb_query_ee_certificate_requests(self.self_handle, + self.got_requests, + self.get_requests_failed) + + def got_requests(self, requests): + + try: + self.gctx.checkpoint() + if self.gctx.sql.dirty: + rpki.log.warn("Unexpected dirty SQL cache, flushing") + self.gctx.sql.sweep() + + publisher = rpki.rpkid.publication_queue() + + existing = dict() + for ee in self.ee_certificates: + gski = ee.gski + if gski not in existing: + existing[gski] = set() + existing[gski].add(ee) + + ca_details = set() + + for req in requests: + ees = existing.pop(req.gski, ()) + resources = rpki.resource_set.resource_bag( + asn = req.asn, + v4 = req.ipv4, + v6 = req.ipv6, + valid_until = req.valid_until) + covering = self.find_covering_ca_details(resources) + ca_details.update(covering) + + for ee in ees: + if ee.ca_detail in covering: + rpki.log.debug("Updating existing EE certificate for %s %s" % (req.gski, resources)) + ee.reissue( + resources = resources, + publisher = publisher) + covering.remove(ee.ca_detail) + else: + rpki.log.debug("Existing EE certificate for %s %s is no longer covered" % (req.gski, resources)) + ee.revoke(publisher = publisher) + + for ca_detail in covering: + rpki.log.debug("No existing EE certificate for %s %s" % (req.gski, resources)) + rpki.rpkid.ee_cert_obj.create( + ca_detail = ca_detail, + subject_name = rpki.x509.X501DN.from_cn(req.cn, req.sn), + subject_key = req.pkcs10.getPublicKey(), + resources = resources, + publisher = publisher, + eku = req.eku or None) + + # Anything left is an orphan + for ees in existing.values(): + for ee in ees: + ca_details.add(ee.ca_detail) + ee.revoke(publisher = publisher) + + self.gctx.sql.sweep() + + for ca_detail in ca_details: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + + self.gctx.sql.sweep() + + self.gctx.checkpoint() + publisher.call_pubd(self.exit, self.publication_failed) + + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + rpki.log.traceback() + rpki.log.warn("Could not update EE certificates for %s, skipping: %s" % (self.self_handle, e)) + self.exit() + + def publication_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish EE certificate updates for %s, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() + + def get_requests_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Could not fetch EE certificate requests for %s, skipping: %s" % (self.self_handle, e)) + self.exit() + + +@queue_task +class 
RegenerateCRLsAndManifestsTask(AbstractTask): + """ + Generate new CRLs and manifests as necessary for all of this self's + CAs. Extracting nextUpdate from a manifest is hard at the moment + due to implementation silliness, so for now we generate a new + manifest whenever we generate a new CRL + + This code also cleans up tombstones left behind by revoked ca_detail + objects, since we're walking through the relevant portions of the + database anyway. + """ + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + rpki.log.debug("Self %s[%d] regenerating CRLs and manifests" % (self.self_handle, self.self_id)) + + now = rpki.sundial.now() + crl_interval = rpki.sundial.timedelta(seconds = self.crl_interval) + regen_margin = max(self.gctx.cron_period * 2, crl_interval / 4) + publisher = rpki.rpkid.publication_queue() + + for parent in self.parents: + for ca in parent.cas: + try: + for ca_detail in ca.revoked_ca_details: + if now > ca_detail.latest_crl.getNextUpdate(): + ca_detail.delete(ca = ca, publisher = publisher) + for ca_detail in ca.active_or_deprecated_ca_details: + if now + regen_margin > ca_detail.latest_crl.getNextUpdate(): + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + rpki.log.traceback() + rpki.log.warn("Couldn't regenerate CRLs and manifests for CA %r, skipping: %s" % (ca, e)) + + self.gctx.checkpoint() + self.gctx.sql.sweep() + publisher.call_pubd(self.exit, self.lose) + + def lose(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish updated CRLs and manifests for self %r, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() + + +@queue_task +class CheckFailedPublication(AbstractTask): + """ + Periodic check for objects we tried to publish but failed (eg, due + to pubd being down or unreachable). + """ + + def start(self): + rpki.log.trace() + publisher = rpki.rpkid.publication_queue() + for parent in self.parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None: + ca_detail.check_failed_publication(publisher) + self.gctx.checkpoint() + self.gctx.sql.sweep() + publisher.call_pubd(self.exit, self.publication_failed) + + def publication_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish for %s, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() diff --git a/rpki/sql.py b/rpki/sql.py new file mode 100644 index 00000000..c753278c --- /dev/null +++ b/rpki/sql.py @@ -0,0 +1,424 @@ +# $Id$ +# +# Copyright (C) 2009-2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +SQL interface code. +""" + +import weakref + +from rpki.mysql_import import (MySQLdb, _mysql_exceptions) + +import rpki.x509 +import rpki.resource_set +import rpki.sundial +import rpki.log + +class session(object): + """ + SQL session layer. + """ + + ## @var ping_threshold + # Timeout after which we should issue a ping command before the real + # one. Intent is to keep the MySQL connection alive without pinging + # before every single command. + + ping_threshold = rpki.sundial.timedelta(seconds = 60) + + def __init__(self, cfg): + + self.username = cfg.get("sql-username") + self.database = cfg.get("sql-database") + self.password = cfg.get("sql-password") + + self.conv = MySQLdb.converters.conversions.copy() + self.conv.update({ + rpki.sundial.datetime : MySQLdb.converters.DateTime2literal, + MySQLdb.converters.FIELD_TYPE.DATETIME : rpki.sundial.datetime.DateTime_or_None }) + + self.cache = weakref.WeakValueDictionary() + self.dirty = set() + + self.connect() + + def connect(self): + self.db = MySQLdb.connect(user = self.username, + db = self.database, + passwd = self.password, + conv = self.conv) + self.cur = self.db.cursor() + self.db.autocommit(True) + self.timestamp = rpki.sundial.now() + + def close(self): + if self.cur: + self.cur.close() + self.cur = None + if self.db: + self.db.close() + self.db = None + + def _wrap_execute(self, func, query, args): + try: + now = rpki.sundial.now() + if now > self.timestamp + self.ping_threshold: + self.db.ping(True) + self.timestamp = now + return func(query, args) + except _mysql_exceptions.MySQLError: + if self.dirty: + rpki.log.warn("MySQL exception with dirty objects in SQL cache!") + raise + + def execute(self, query, args = None): + return self._wrap_execute(self.cur.execute, query, args) + + def executemany(self, query, args): + return self._wrap_execute(self.cur.executemany, query, args) + + def fetchall(self): + return self.cur.fetchall() + + def lastrowid(self): + return self.cur.lastrowid + + def cache_clear(self): + """ + Clear the SQL object cache. Shouldn't be necessary now that the + cache uses weak references, but should be harmless. + """ + rpki.log.debug("Clearing SQL cache") + self.assert_pristine() + self.cache.clear() + + def assert_pristine(self): + """ + Assert that there are no dirty objects in the cache. + """ + assert not self.dirty, "Dirty objects in SQL cache: %s" % self.dirty + + def sweep(self): + """ + Write any dirty objects out to SQL. 
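The ping handling in _wrap_execute() keeps long-idle MySQL connections alive without paying for a ping before every statement: only when more than ping_threshold has elapsed since the last activity does it ping (and reconnect) first. A stdlib-only sketch of that throttling, with a stub in place of the real connection:

from datetime import datetime, timedelta

class throttled_pinger(object):

    ping_threshold = timedelta(seconds = 60)

    def __init__(self, ping):
        self.ping = ping
        self.timestamp = datetime.utcnow()

    def before_query(self):
        now = datetime.utcnow()
        if now > self.timestamp + self.ping_threshold:
            self.ping()                     # connection may have gone away
        self.timestamp = now

def fake_ping():
    print("pinging MySQL before the next query")

p = throttled_pinger(fake_ping)
p.before_query()                            # recent activity, no ping
p.timestamp -= timedelta(seconds = 120)
p.before_query()                            # idle too long, ping first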
+ """ + for s in self.dirty.copy(): + #if s.sql_cache_debug: + rpki.log.debug("Sweeping (%s) %r" % ("deleting" if s.sql_deleted else "storing", s)) + if s.sql_deleted: + s.sql_delete() + else: + s.sql_store() + self.assert_pristine() + +class template(object): + """ + SQL template generator. + """ + + def __init__(self, table_name, index_column, *data_columns): + """ + Build a SQL template. + """ + type_map = dict((x[0], x[1]) for x in data_columns if isinstance(x, tuple)) + data_columns = tuple(isinstance(x, tuple) and x[0] or x for x in data_columns) + columns = (index_column,) + data_columns + self.table = table_name + self.index = index_column + self.columns = columns + self.map = type_map + self.select = "SELECT %s FROM %s" % (", ".join("%s.%s" % (table_name, c) for c in columns), table_name) + self.insert = "INSERT %s (%s) VALUES (%s)" % (table_name, + ", ".join(data_columns), + ", ".join("%(" + s + ")s" for s in data_columns)) + self.update = "UPDATE %s SET %s WHERE %s = %%(%s)s" % (table_name, + ", ".join(s + " = %(" + s + ")s" for s in data_columns), + index_column, + index_column) + self.delete = "DELETE FROM %s WHERE %s = %%s" % (table_name, index_column) + +class sql_persistent(object): + """ + Mixin for persistent class that needs to be stored in SQL. + """ + + ## @var sql_in_db + # Whether this object is already in SQL or not. + + sql_in_db = False + + ## @var sql_deleted + # Whether our cached copy of this object has been deleted. + + sql_deleted = False + + ## @var sql_debug + # Enable logging of SQL actions + + sql_debug = False + + ## @var sql_cache_debug + # Enable debugging of SQL cache actions + + sql_cache_debug = False + + @classmethod + def sql_fetch(cls, gctx, id): # pylint: disable=W0622 + """ + Fetch one object from SQL, based on its primary key. + + Since in this one case we know that the primary index is also the + cache key, we check for a cache hit directly in the hope of + bypassing the SQL lookup entirely. + + This method is usually called via a one-line class-specific + wrapper. As a convenience, we also accept an id of None, and just + return None in this case. + """ + + if id is None: + return None + assert isinstance(id, (int, long)), "id should be an integer, was %r" % type(id) + key = (cls, id) + if key in gctx.sql.cache: + return gctx.sql.cache[key] + else: + return cls.sql_fetch_where1(gctx, "%s = %%s" % cls.sql_template.index, (id,)) + + @classmethod + def sql_fetch_where1(cls, gctx, where, args = None, also_from = None): + """ + Fetch one object from SQL, based on an arbitrary SQL WHERE expression. + """ + results = cls.sql_fetch_where(gctx, where, args, also_from) + if len(results) == 0: + return None + elif len(results) == 1: + return results[0] + else: + raise rpki.exceptions.DBConsistancyError, \ + "Database contained multiple matches for %s where %s: %r" % \ + (cls.__name__, where % tuple(repr(a) for a in args), results) + + @classmethod + def sql_fetch_all(cls, gctx): + """ + Fetch all objects of this type from SQL. + """ + return cls.sql_fetch_where(gctx, None) + + @classmethod + def sql_fetch_where(cls, gctx, where, args = None, also_from = None): + """ + Fetch objects of this type matching an arbitrary SQL WHERE expression. 
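To make the template class concrete, these are the statements it would generate for a small hypothetical table (the table and column names are invented; the parameter markers are MySQLdb's pyformat/format styles):

# template("widget", "widget_id", "name", "size") would yield:
select = "SELECT widget.widget_id, widget.name, widget.size FROM widget"
insert = "INSERT widget (name, size) VALUES (%(name)s, %(size)s)"
update = ("UPDATE widget SET name = %(name)s, size = %(size)s"
          " WHERE widget_id = %(widget_id)s")
delete = "DELETE FROM widget WHERE widget_id = %s"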
+ """ + if where is None: + assert args is None and also_from is None + if cls.sql_debug: + rpki.log.debug("sql_fetch_where(%r)" % cls.sql_template.select) + gctx.sql.execute(cls.sql_template.select) + else: + query = cls.sql_template.select + if also_from is not None: + query += "," + also_from + query += " WHERE " + where + if cls.sql_debug: + rpki.log.debug("sql_fetch_where(%r, %r)" % (query, args)) + gctx.sql.execute(query, args) + results = [] + for row in gctx.sql.fetchall(): + key = (cls, row[0]) + if key in gctx.sql.cache: + results.append(gctx.sql.cache[key]) + else: + results.append(cls.sql_init(gctx, row, key)) + return results + + @classmethod + def sql_init(cls, gctx, row, key): + """ + Initialize one Python object from the result of a SQL query. + """ + self = cls() + self.gctx = gctx + self.sql_decode(dict(zip(cls.sql_template.columns, row))) + gctx.sql.cache[key] = self + self.sql_in_db = True + self.sql_fetch_hook() + return self + + def sql_mark_dirty(self): + """ + Mark this object as needing to be written back to SQL. + """ + if self.sql_cache_debug and not self.sql_is_dirty: + rpki.log.debug("Marking %r SQL dirty" % self) + self.gctx.sql.dirty.add(self) + + def sql_mark_clean(self): + """ + Mark this object as not needing to be written back to SQL. + """ + if self.sql_cache_debug and self.sql_is_dirty: + rpki.log.debug("Marking %r SQL clean" % self) + self.gctx.sql.dirty.discard(self) + + @property + def sql_is_dirty(self): + """ + Query whether this object needs to be written back to SQL. + """ + return self in self.gctx.sql.dirty + + def sql_mark_deleted(self): + """ + Mark this object as needing to be deleted in SQL. + """ + self.sql_deleted = True + self.sql_mark_dirty() + + def sql_store(self): + """ + Store this object to SQL. + """ + args = self.sql_encode() + if not self.sql_in_db: + if self.sql_debug: + rpki.log.debug("sql_store(%r, %r)" % (self.sql_template.insert, args)) + self.gctx.sql.execute(self.sql_template.insert, args) + setattr(self, self.sql_template.index, self.gctx.sql.lastrowid()) + self.gctx.sql.cache[(self.__class__, self.gctx.sql.lastrowid())] = self + self.sql_insert_hook() + else: + if self.sql_debug: + rpki.log.debug("sql_store(%r, %r)" % (self.sql_template.update, args)) + self.gctx.sql.execute(self.sql_template.update, args) + self.sql_update_hook() + key = (self.__class__, getattr(self, self.sql_template.index)) + assert key in self.gctx.sql.cache and self.gctx.sql.cache[key] == self + self.sql_mark_clean() + self.sql_in_db = True + + def sql_delete(self): + """ + Delete this object from SQL. + """ + if self.sql_in_db: + id = getattr(self, self.sql_template.index) # pylint: disable=W0622 + if self.sql_debug: + rpki.log.debug("sql_delete(%r, %r)" % (self.sql_template.delete, id)) + self.sql_delete_hook() + self.gctx.sql.execute(self.sql_template.delete, id) + key = (self.__class__, id) + if self.gctx.sql.cache.get(key) == self: + del self.gctx.sql.cache[key] + self.sql_in_db = False + self.sql_mark_clean() + + def sql_encode(self): + """ + Convert object attributes into a dict for use with canned SQL + queries. This is a default version that assumes a one-to-one + mapping between column names in SQL and attribute names in Python. + If you need something fancier, override this. 
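The type map built by the template (column name mapped to a wrapper class) is what lets sql_encode() and sql_decode() shuttle rich Python objects through MySQL as plain column values. A minimal sketch of that contract, with a toy wrapper class standing in for things like rpki.sundial.datetime:

class toy_wrapper(object):
    # Knows how to convert itself to and from a raw SQL column value.

    def __init__(self, raw):
        self.raw = raw

    def to_sql(self):
        return self.raw

    @classmethod
    def from_sql(cls, raw):
        return cls(raw)

type_map = {"published": toy_wrapper}       # column name -> wrapper class

raw_row = {"widget_id": 1, "published": "2014-04-05 19:24:26"}

# Decode: wrap mapped columns in their Python types (cf. sql_decode()).
obj = dict((k, type_map[k].from_sql(v) if k in type_map else v)
           for k, v in raw_row.items())

# Encode: unwrap them again before handing the dict to MySQLdb (cf. sql_encode()).
back = dict((k, v.to_sql() if k in type_map else v) for k, v in obj.items())

assert back == raw_row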
+ """ + d = dict((a, getattr(self, a, None)) for a in self.sql_template.columns) + for i in self.sql_template.map: + if d.get(i) is not None: + d[i] = self.sql_template.map[i].to_sql(d[i]) + return d + + def sql_decode(self, vals): + """ + Initialize an object with values returned by self.sql_fetch(). + This is a default version that assumes a one-to-one mapping + between column names in SQL and attribute names in Python. If you + need something fancier, override this. + """ + for a in self.sql_template.columns: + if vals.get(a) is not None and a in self.sql_template.map: + setattr(self, a, self.sql_template.map[a].from_sql(vals[a])) + else: + setattr(self, a, vals[a]) + + def sql_fetch_hook(self): + """ + Customization hook. + """ + pass + + def sql_insert_hook(self): + """ + Customization hook. + """ + pass + + def sql_update_hook(self): + """ + Customization hook. + """ + self.sql_delete_hook() + self.sql_insert_hook() + + def sql_delete_hook(self): + """ + Customization hook. + """ + pass + + +def cache_reference(func): + """ + Decorator for use with property methods which just do an SQL lookup based on an ID. + Check for an existing reference to the object, just return that if we find it, + otherwise perform the SQL lookup. + + Not 100% certain this is a good idea, but I //think// it should work well with the + current weak reference SQL cache, so long as we create no circular references. + So don't do that. + """ + + attr_name = "_" + func.__name__ + + def wrapped(self): + try: + value = getattr(self, attr_name) + assert value is not None + except AttributeError: + value = func(self) + if value is not None: + setattr(self, attr_name, value) + return value + + wrapped.__name__ = func.__name__ + wrapped.__doc__ = func.__doc__ + wrapped.__dict__.update(func.__dict__) + + return wrapped diff --git a/rpki/sql_schemas.py b/rpki/sql_schemas.py new file mode 100644 index 00000000..e57c7a7f --- /dev/null +++ b/rpki/sql_schemas.py @@ -0,0 +1,319 @@ +# Automatically generated, do not edit. + +## @var rpkid +## SQL schema rpkid +rpkid = '''-- $Id: rpkid.sql 5753 2014-04-05 19:24:26Z sra $ + +-- Copyright (C) 2009--2011 Internet Systems Consortium ("ISC") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- SQL objects needed by the RPKI engine (rpkid.py). + +-- DROP TABLE commands must be in correct (reverse dependency) order +-- to satisfy FOREIGN KEY constraints. + +DROP TABLE IF EXISTS ee_cert; +DROP TABLE IF EXISTS ghostbuster; +DROP TABLE IF EXISTS roa_prefix; +DROP TABLE IF EXISTS roa; +DROP TABLE IF EXISTS revoked_cert; +DROP TABLE IF EXISTS child_cert; +DROP TABLE IF EXISTS child; +DROP TABLE IF EXISTS ca_detail; +DROP TABLE IF EXISTS ca; +DROP TABLE IF EXISTS parent; +DROP TABLE IF EXISTS repository; +DROP TABLE IF EXISTS bsc; +DROP TABLE IF EXISTS self; + +CREATE TABLE self ( + self_id SERIAL NOT NULL, + self_handle VARCHAR(255) NOT NULL, + use_hsm BOOLEAN NOT NULL DEFAULT FALSE, + crl_interval BIGINT UNSIGNED, + regen_margin BIGINT UNSIGNED, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + PRIMARY KEY (self_id), + UNIQUE (self_handle) +) ENGINE=InnoDB; + +CREATE TABLE bsc ( + bsc_id SERIAL NOT NULL, + bsc_handle VARCHAR(255) NOT NULL, + private_key_id LONGBLOB, + pkcs10_request LONGBLOB, + hash_alg ENUM ('sha256'), + signing_cert LONGBLOB, + signing_cert_crl LONGBLOB, + self_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (bsc_id), + CONSTRAINT bsc_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + UNIQUE (self_id, bsc_handle) +) ENGINE=InnoDB; + +CREATE TABLE repository ( + repository_id SERIAL NOT NULL, + repository_handle VARCHAR(255) NOT NULL, + peer_contact_uri TEXT, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + last_cms_timestamp DATETIME, + bsc_id BIGINT UNSIGNED NOT NULL, + self_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (repository_id), + CONSTRAINT repository_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT repository_bsc_id + FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, + UNIQUE (self_id, repository_handle) +) ENGINE=InnoDB; + +CREATE TABLE parent ( + parent_id SERIAL NOT NULL, + parent_handle VARCHAR(255) NOT NULL, + bpki_cms_cert LONGBLOB, + bpki_cms_glue LONGBLOB, + peer_contact_uri TEXT, + sia_base TEXT, + sender_name TEXT, + recipient_name TEXT, + last_cms_timestamp DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + bsc_id BIGINT UNSIGNED NOT NULL, + repository_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (parent_id), + CONSTRAINT parent_repository_id + FOREIGN KEY (repository_id) REFERENCES repository (repository_id) ON DELETE CASCADE, + CONSTRAINT parent_bsc_id + FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, + CONSTRAINT parent_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + UNIQUE (self_id, parent_handle) +) ENGINE=InnoDB; + +CREATE TABLE ca ( + ca_id SERIAL NOT NULL, + last_crl_sn BIGINT UNSIGNED NOT NULL, + last_manifest_sn BIGINT UNSIGNED NOT NULL, + next_manifest_update DATETIME, + next_crl_update DATETIME, + last_issued_sn BIGINT UNSIGNED NOT NULL, + sia_uri TEXT, + parent_resource_class TEXT, + parent_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ca_id), + CONSTRAINT ca_parent_id + FOREIGN KEY (parent_id) REFERENCES parent (parent_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ca_detail ( + ca_detail_id SERIAL NOT NULL, + public_key LONGBLOB, + private_key_id LONGBLOB, + latest_crl LONGBLOB, + 
crl_published DATETIME, + latest_ca_cert LONGBLOB, + manifest_private_key_id LONGBLOB, + manifest_public_key LONGBLOB, + latest_manifest_cert LONGBLOB, + latest_manifest LONGBLOB, + manifest_published DATETIME, + state ENUM ('pending', 'active', 'deprecated', 'revoked') NOT NULL, + ca_cert_uri TEXT, + ca_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ca_detail_id), + CONSTRAINT ca_detail_ca_id + FOREIGN KEY (ca_id) REFERENCES ca (ca_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE child ( + child_id SERIAL NOT NULL, + child_handle VARCHAR(255) NOT NULL, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + last_cms_timestamp DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + bsc_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (child_id), + CONSTRAINT child_bsc_id + FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, + CONSTRAINT child_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + UNIQUE (self_id, child_handle) +) ENGINE=InnoDB; + +CREATE TABLE child_cert ( + child_cert_id SERIAL NOT NULL, + cert LONGBLOB NOT NULL, + published DATETIME, + ski TINYBLOB NOT NULL, + child_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (child_cert_id), + CONSTRAINT child_cert_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE, + CONSTRAINT child_cert_child_id + FOREIGN KEY (child_id) REFERENCES child (child_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE revoked_cert ( + revoked_cert_id SERIAL NOT NULL, + serial BIGINT UNSIGNED NOT NULL, + revoked DATETIME NOT NULL, + expires DATETIME NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (revoked_cert_id), + CONSTRAINT revoked_cert_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE roa ( + roa_id SERIAL NOT NULL, + asn BIGINT UNSIGNED NOT NULL, + cert LONGBLOB NOT NULL, + roa LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (roa_id), + CONSTRAINT roa_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT roa_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE roa_prefix ( + prefix VARCHAR(40) NOT NULL, + prefixlen TINYINT UNSIGNED NOT NULL, + max_prefixlen TINYINT UNSIGNED NOT NULL, + version TINYINT UNSIGNED NOT NULL, + roa_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (roa_id, prefix, prefixlen, max_prefixlen), + CONSTRAINT roa_prefix_roa_id + FOREIGN KEY (roa_id) REFERENCES roa (roa_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ghostbuster ( + ghostbuster_id SERIAL NOT NULL, + vcard LONGBLOB NOT NULL, + cert LONGBLOB NOT NULL, + ghostbuster LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ghostbuster_id), + CONSTRAINT ghostbuster_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT ghostbuster_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ee_cert ( + ee_cert_id SERIAL NOT NULL, + ski BINARY(20) NOT NULL, + cert LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ee_cert_id), + CONSTRAINT ee_cert_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE 
CASCADE, + CONSTRAINT ee_cert_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +-- Local Variables: +-- indent-tabs-mode: nil +-- End: +''' + +## @var pubd +## SQL schema pubd +pubd = '''-- $Id: pubd.sql 3465 2010-10-07 00:59:39Z sra $ + +-- Copyright (C) 2009--2010 Internet Systems Consortium ("ISC") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Copyright (C) 2008 American Registry for Internet Numbers ("ARIN") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- SQL objects needed by pubd.py. + +-- The config table is weird because we're really only using it +-- to store one BPKI CRL, but putting this here lets us use a lot of +-- existing machinery and the alternatives are whacky in other ways. + +DROP TABLE IF EXISTS client; +DROP TABLE IF EXISTS config; + +CREATE TABLE config ( + config_id SERIAL NOT NULL, + bpki_crl LONGBLOB, + PRIMARY KEY (config_id) +) ENGINE=InnoDB; + +CREATE TABLE client ( + client_id SERIAL NOT NULL, + client_handle VARCHAR(255) NOT NULL, + base_uri TEXT, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + last_cms_timestamp DATETIME, + PRIMARY KEY (client_id), + UNIQUE (client_handle) +) ENGINE=InnoDB; + +-- Local Variables: +-- indent-tabs-mode: nil +-- End: +''' + diff --git a/rpki/sundial.py b/rpki/sundial.py new file mode 100644 index 00000000..0825d61b --- /dev/null +++ b/rpki/sundial.py @@ -0,0 +1,289 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
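As a rough illustration of how the tables above relate through their FOREIGN KEY ... ON DELETE CASCADE chains, here is a small query sketch using MySQLdb (which the package already depends on elsewhere); the connection parameters and the query itself are invented for this example and are not part of the schema:

    # Hypothetical sketch: list every ROA prefix in an rpkid database,
    # walking roa_prefix -> roa -> self via the foreign keys defined above.
    # Credentials and database name are placeholders.
    import MySQLdb

    db = MySQLdb.connect(user = "rpkid", passwd = "fnord", db = "rpkid")
    cur = db.cursor()
    cur.execute("""
        SELECT self.self_handle, roa.asn,
               roa_prefix.prefix, roa_prefix.prefixlen, roa_prefix.max_prefixlen
        FROM roa_prefix
        JOIN roa  ON roa.roa_id   = roa_prefix.roa_id
        JOIN self ON self.self_id = roa.self_id
        """)
    for handle, asn, prefix, prefixlen, max_prefixlen in cur.fetchall():
        print "%s: AS%s %s/%s-%s" % (handle, asn, prefix, prefixlen, max_prefixlen)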
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Unified RPKI date/time handling, based on the standard Python datetime module. + +Module name chosen to sidestep a nightmare of import-related errors +that occur with the more obvious module names. + +List of arithmetic methods that require result casting was derived by +inspection of the datetime module, to wit: + + >>> import datetime + >>> for t in (datetime.datetime, datetime.timedelta): + ... for k in t.__dict__.keys(): + ... if k.startswith("__"): + ... print "%s.%s()" % (t.__name__, k) +""" + +import datetime as pydatetime +import re + +def now(): + """ + Get current timestamp. + """ + return datetime.utcnow() + +class ParseFailure(Exception): + """ + Parse failure constructing timedelta. + """ + +class datetime(pydatetime.datetime): + """ + RPKI extensions to standard datetime.datetime class. All work here + is in UTC, so we use naive datetime objects. + """ + + def totimestamp(self): + """ + Convert to seconds from epoch (like time.time()). Conversion + method is a bit silly, but avoids time module timezone whackiness. + """ + return int(self.strftime("%s")) + + @classmethod + def fromXMLtime(cls, x): + """ + Convert from XML time representation. + """ + if x is None: + return None + else: + return cls.strptime(x, "%Y-%m-%dT%H:%M:%SZ") + + def toXMLtime(self): + """ + Convert to XML time representation. + """ + return self.strftime("%Y-%m-%dT%H:%M:%SZ") + + def __str__(self): + return self.toXMLtime() + + @classmethod + def from_datetime(cls, x): + """ + Convert a datetime.datetime object into this subclass. This is + whacky due to the weird constructors for datetime. + """ + return cls.combine(x.date(), x.time()) + + def to_datetime(self): + """ + Convert to a datetime.datetime object. In most cases this + shouldn't be necessary, but convincing SQL interfaces to use + subclasses of datetime can be hard. + """ + return pydatetime.datetime(year = self.year, month = self.month, day = self.day, + hour = self.hour, minute = self.minute, second = self.second, + microsecond = 0, tzinfo = None) + + + @classmethod + def fromOpenSSL(cls, x): + """ + Convert from the format OpenSSL's command line tool uses into this + subclass. May require rewriting if we run into locale problems. 
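A short usage sketch of the conversion helpers in this class (assuming only that rpki.sundial is importable; the OpenSSL timestamp is an arbitrary example value):

    import rpki.sundial

    t = rpki.sundial.now()                      # naive UTC datetime subclass
    x = t.toXMLtime()                           # e.g. "2014-01-02T03:04:05Z"
    assert rpki.sundial.datetime.fromXMLtime(x) == t.replace(microsecond = 0)

    # Output of the OpenSSL command line tool parses directly:
    expiry = rpki.sundial.datetime.fromOpenSSL("notAfter=May 21 00:00:00 2032 GMT")
    print expiry.toXMLtime()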
+ """ + if x.startswith("notBefore=") or x.startswith("notAfter="): + x = x.partition("=")[2] + return cls.strptime(x, "%b %d %H:%M:%S %Y GMT") + + @classmethod + def from_sql(cls, x): + """ + Convert from SQL storage format. + """ + return cls.from_datetime(x) + + def to_sql(self): + """ + Convert to SQL storage format. + """ + return self.to_datetime() + + def later(self, other): + """ + Return the later of two timestamps. + """ + return other if other > self else self + + def earlier(self, other): + """ + Return the earlier of two timestamps. + """ + return other if other < self else self + + def __add__(self, y): return _cast(pydatetime.datetime.__add__(self, y)) + def __radd__(self, y): return _cast(pydatetime.datetime.__radd__(self, y)) + def __rsub__(self, y): return _cast(pydatetime.datetime.__rsub__(self, y)) + def __sub__(self, y): return _cast(pydatetime.datetime.__sub__(self, y)) + + @classmethod + def DateTime_or_None(cls, s): + """ + MySQLdb converter. Parse as this class if we can, let the default + MySQLdb DateTime_or_None() converter deal with failure cases. + """ + + for sep in " T": + d, _, t = s.partition(sep) + if t: + try: + return cls(*[int(x) for x in d.split("-") + t.split(":")]) + except: + break + + from rpki.mysql_import import MySQLdb + return MySQLdb.times.DateTime_or_None(s) + +class timedelta(pydatetime.timedelta): + """ + Timedelta with text parsing. This accepts two input formats: + + - A simple integer, indicating a number of seconds. + + - A string of the form "uY vW wD xH yM zS" where u, v, w, x, y, and z + are integers and Y, W, D, H, M, and S indicate years, weeks, days, + hours, minutes, and seconds. All of the fields are optional, but + at least one must be specified. Eg,"3D4H" means "three days plus + four hours". + + There is no "months" format, because the definition of a month is too + fuzzy to be useful (what day is six months from August 30th?) + + Similarly, the "years" conversion may produce surprising results, as + "one year" in conventional English does not refer to a fixed interval + but rather a fixed (and in some cases undefined) offset within the + Gregorian calendar (what day is one year from February 29th?) 1Y as + implemented by this code refers to a specific number of seconds. + If you mean 365 days or 52 weeks, say that instead. + """ + + ## @var regexp + # Hideously ugly regular expression to parse the complex text form. + # Tags are intended for use with re.MatchObject.groupdict() and map + # directly to the keywords expected by the timedelta constructor. + + regexp = re.compile("\\s*".join(("^", + "(?:(?P<years>\\d+)Y)?", + "(?:(?P<weeks>\\d+)W)?", + "(?:(?P<days>\\d+)D)?", + "(?:(?P<hours>\\d+)H)?", + "(?:(?P<minutes>\\d+)M)?", + "(?:(?P<seconds>\\d+)S)?", + "$")), + re.I) + + ## @var years_to_seconds + # Conversion factor from years to seconds (value furnished by the + # "units" program). + + years_to_seconds = 31556926 + + @classmethod + def parse(cls, arg): + """ + Parse text into a timedelta object. 
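The parse() method whose docstring begins above accepts either form; a small sketch (again assuming rpki.sundial is importable), which also shows that arithmetic results stay in the rpki.sundial subclasses thanks to the operator overrides and _cast() plumbing further down:

    from rpki.sundial import timedelta
    import rpki.sundial

    print timedelta.parse("30")                        # bare integer: 30 seconds
    print timedelta.parse("3D4H")                      # three days plus four hours
    print timedelta.parse("2W").days                   # two weeks -> 14 days
    print timedelta.parse("1Y").convert_to_seconds()   # 31556926, per years_to_seconds

    # Arithmetic is cast back into the rpki.sundial types, so the XML
    # helpers remain available on the result:
    print (rpki.sundial.now() + timedelta.parse("1Y3D5H")).toXMLtime()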
+ """ + if not isinstance(arg, str): + return cls(seconds = arg) + elif arg.isdigit(): + return cls(seconds = int(arg)) + else: + match = cls.regexp.match(arg) + if match: + #return cls(**dict((k, int(v)) for (k, v) in match.groupdict().items() if v is not None)) + d = match.groupdict("0") + for k, v in d.iteritems(): + d[k] = int(v) + d["days"] += d.pop("weeks") * 7 + d["seconds"] += d.pop("years") * cls.years_to_seconds + return cls(**d) + else: + raise ParseFailure, "Couldn't parse timedelta %r" % (arg,) + + def convert_to_seconds(self): + """ + Convert a timedelta interval to seconds. + """ + return self.days * 24 * 60 * 60 + self.seconds + + @classmethod + def fromtimedelta(cls, x): + """ + Convert a datetime.timedelta object into this subclass. + """ + return cls(days = x.days, seconds = x.seconds, microseconds = x.microseconds) + + def __abs__(self): return _cast(pydatetime.timedelta.__abs__(self)) + def __add__(self, x): return _cast(pydatetime.timedelta.__add__(self, x)) + def __div__(self, x): return _cast(pydatetime.timedelta.__div__(self, x)) + def __floordiv__(self, x): return _cast(pydatetime.timedelta.__floordiv__(self, x)) + def __mul__(self, x): return _cast(pydatetime.timedelta.__mul__(self, x)) + def __neg__(self): return _cast(pydatetime.timedelta.__neg__(self)) + def __pos__(self): return _cast(pydatetime.timedelta.__pos__(self)) + def __radd__(self, x): return _cast(pydatetime.timedelta.__radd__(self, x)) + def __rdiv__(self, x): return _cast(pydatetime.timedelta.__rdiv__(self, x)) + def __rfloordiv__(self, x): return _cast(pydatetime.timedelta.__rfloordiv__(self, x)) + def __rmul__(self, x): return _cast(pydatetime.timedelta.__rmul__(self, x)) + def __rsub__(self, x): return _cast(pydatetime.timedelta.__rsub__(self, x)) + def __sub__(self, x): return _cast(pydatetime.timedelta.__sub__(self, x)) + +def _cast(x): + """ + Cast result of arithmetic operations back into correct subtype. + """ + if isinstance(x, pydatetime.datetime): + return datetime.from_datetime(x) + if isinstance(x, pydatetime.timedelta): + return timedelta.fromtimedelta(x) + return x + +if __name__ == "__main__": + + def test(t): + print + print "str: ", t + print "repr: ", repr(t) + print "seconds since epoch:", t.strftime("%s") + print "XMLtime: ", t.toXMLtime() + print + + print + print "Testing time conversion routines" + test(now()) + test(now() + timedelta(days = 30)) + test(now() + timedelta.parse("3d5s")) + test(now() + timedelta.parse(" 3d 5s ")) + test(now() + timedelta.parse("1y3d5h")) diff --git a/rpki/up_down.py b/rpki/up_down.py new file mode 100644 index 00000000..d2ad85d3 --- /dev/null +++ b/rpki/up_down.py @@ -0,0 +1,732 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +RPKI "up-down" protocol. +""" + +import base64 +import lxml.etree +import rpki.resource_set +import rpki.x509 +import rpki.exceptions +import rpki.log +import rpki.xml_utils +import rpki.relaxng + +xmlns = "http://www.apnic.net/specs/rescerts/up-down/" + +nsmap = { None : xmlns } + +class base_elt(object): + """ + Generic PDU object. + + Virtual class, just provides some default methods. + """ + + def startElement(self, stack, name, attrs): + """ + Ignore startElement() if there's no specific handler. + + Some elements have no attributes and we only care about their + text content. + """ + pass + + def endElement(self, stack, name, text): + """ + Ignore endElement() if there's no specific handler. + + If we don't need to do anything else, just pop the stack. + """ + stack.pop() + + def make_elt(self, name, *attrs): + """ + Construct a element, copying over a set of attributes. + """ + elt = lxml.etree.Element("{%s}%s" % (xmlns, name), nsmap=nsmap) + for key in attrs: + val = getattr(self, key, None) + if val is not None: + elt.set(key, str(val)) + return elt + + def make_b64elt(self, elt, name, value): + """ + Construct a sub-element with Base64 text content. + """ + if value is not None and not value.empty(): + lxml.etree.SubElement(elt, "{%s}%s" % (xmlns, name), nsmap=nsmap).text = value.get_Base64() + + def serve_pdu(self, q_msg, r_msg, child, callback, errback): + """ + Default PDU handler to catch unexpected types. + """ + raise rpki.exceptions.BadQuery("Unexpected query type %s" % q_msg.type) + + def check_response(self): + """ + Placeholder for response checking. + """ + pass + +class multi_uri(list): + """ + Container for a set of URIs. + """ + + def __init__(self, ini): + """ + Initialize a set of URIs, which includes basic some syntax checking. + """ + list.__init__(self) + if isinstance(ini, (list, tuple)): + self[:] = ini + elif isinstance(ini, str): + self[:] = ini.split(",") + for s in self: + if s.strip() != s or "://" not in s: + raise rpki.exceptions.BadURISyntax("Bad URI \"%s\"" % s) + else: + raise TypeError + + def __str__(self): + """ + Convert a multi_uri back to a string representation. + """ + return ",".join(self) + + def rsync(self): + """ + Find first rsync://... URI in self. + """ + for s in self: + if s.startswith("rsync://"): + return s + return None + +class certificate_elt(base_elt): + """ + Up-Down protocol representation of an issued certificate. + """ + + def startElement(self, stack, name, attrs): + """ + Handle attributes of <certificate/> element. + """ + assert name == "certificate", "Unexpected name %s, stack %s" % (name, stack) + self.cert_url = multi_uri(attrs["cert_url"]) + self.req_resource_set_as = rpki.resource_set.resource_set_as(attrs.get("req_resource_set_as")) + self.req_resource_set_ipv4 = rpki.resource_set.resource_set_ipv4(attrs.get("req_resource_set_ipv4")) + self.req_resource_set_ipv6 = rpki.resource_set.resource_set_ipv6(attrs.get("req_resource_set_ipv6")) + + def endElement(self, stack, name, text): + """ + Handle text content of a <certificate/> element. 
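A quick sketch of the multi_uri container defined above (URI values are invented; assumes the rpki package and its lxml dependency are installed):

    from rpki.up_down import multi_uri

    uris = multi_uri("http://example.org/repo/,rsync://example.org/repo/")
    print str(uris)       # "http://example.org/repo/,rsync://example.org/repo/"
    print uris.rsync()    # "rsync://example.org/repo/" -- first rsync URI, if any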
+ """ + assert name == "certificate", "Unexpected name %s, stack %s" % (name, stack) + self.cert = rpki.x509.X509(Base64 = text) + stack.pop() + + def toXML(self): + """ + Generate a <certificate/> element. + """ + elt = self.make_elt("certificate", "cert_url", + "req_resource_set_as", "req_resource_set_ipv4", "req_resource_set_ipv6") + elt.text = self.cert.get_Base64() + return elt + +class class_elt(base_elt): + """ + Up-Down protocol representation of a resource class. + """ + + issuer = None + + def __init__(self): + """ + Initialize class_elt. + """ + base_elt.__init__(self) + self.certs = [] + + def startElement(self, stack, name, attrs): + """ + Handle <class/> elements and their children. + """ + if name == "certificate": + cert = certificate_elt() + self.certs.append(cert) + stack.append(cert) + cert.startElement(stack, name, attrs) + elif name != "issuer": + assert name == "class", "Unexpected name %s, stack %s" % (name, stack) + self.class_name = attrs["class_name"] + self.cert_url = multi_uri(attrs["cert_url"]) + self.suggested_sia_head = attrs.get("suggested_sia_head") + self.resource_set_as = rpki.resource_set.resource_set_as(attrs["resource_set_as"]) + self.resource_set_ipv4 = rpki.resource_set.resource_set_ipv4(attrs["resource_set_ipv4"]) + self.resource_set_ipv6 = rpki.resource_set.resource_set_ipv6(attrs["resource_set_ipv6"]) + self.resource_set_notafter = rpki.sundial.datetime.fromXMLtime(attrs.get("resource_set_notafter")) + + def endElement(self, stack, name, text): + """ + Handle <class/> elements and their children. + """ + if name == "issuer": + self.issuer = rpki.x509.X509(Base64 = text) + else: + assert name == "class", "Unexpected name %s, stack %s" % (name, stack) + stack.pop() + + def toXML(self): + """ + Generate a <class/> element. + """ + elt = self.make_elt("class", "class_name", "cert_url", "resource_set_as", + "resource_set_ipv4", "resource_set_ipv6", + "resource_set_notafter", "suggested_sia_head") + elt.extend([i.toXML() for i in self.certs]) + self.make_b64elt(elt, "issuer", self.issuer) + return elt + + def to_resource_bag(self): + """ + Build a resource_bag from from this <class/> element. + """ + return rpki.resource_set.resource_bag(self.resource_set_as, + self.resource_set_ipv4, + self.resource_set_ipv6, + self.resource_set_notafter) + + def from_resource_bag(self, bag): + """ + Set resources of this class element from a resource_bag. + """ + self.resource_set_as = bag.asn + self.resource_set_ipv4 = bag.v4 + self.resource_set_ipv6 = bag.v6 + self.resource_set_notafter = bag.valid_until + +class list_pdu(base_elt): + """ + Up-Down protocol "list" PDU. + """ + + def toXML(self): + """Generate (empty) payload of "list" PDU.""" + return [] + + def serve_pdu(self, q_msg, r_msg, child, callback, errback): + """ + Serve one "list" PDU. 
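For orientation, class_elt.toXML() above produces an up-down <class/> element in the namespace declared at the top of the module. The following standalone lxml sketch builds a similarly shaped element by hand; the attribute names come from toXML(), while every value is invented documentation data:

    import lxml.etree

    xmlns = "http://www.apnic.net/specs/rescerts/up-down/"
    elt = lxml.etree.Element("{%s}class" % xmlns, nsmap = { None : xmlns })
    elt.set("class_name", "1")
    elt.set("cert_url", "rsync://example.org/repo/ca.cer")
    elt.set("resource_set_as", "64496")
    elt.set("resource_set_ipv4", "192.0.2.0/24")
    elt.set("resource_set_ipv6", "2001:db8::/32")
    elt.set("resource_set_notafter", "2015-01-01T00:00:00Z")
    print lxml.etree.tostring(elt, pretty_print = True)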
+ """ + + def handle(irdb_resources): + + r_msg.payload = list_response_pdu() + + if irdb_resources.valid_until < rpki.sundial.now(): + rpki.log.debug("Child %s's resources expired %s" % (child.child_handle, irdb_resources.valid_until)) + else: + for parent in child.parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if not ca_detail: + rpki.log.debug("No active ca_detail, can't issue to %s" % child.child_handle) + continue + resources = ca_detail.latest_ca_cert.get_3779resources() & irdb_resources + if resources.empty(): + rpki.log.debug("No overlap between received resources and what child %s should get ([%s], [%s])" % (child.child_handle, ca_detail.latest_ca_cert.get_3779resources(), irdb_resources)) + continue + rc = class_elt() + rc.class_name = str(ca.ca_id) + rc.cert_url = multi_uri(ca_detail.ca_cert_uri) + rc.from_resource_bag(resources) + for child_cert in child.fetch_child_certs(ca_detail = ca_detail): + c = certificate_elt() + c.cert_url = multi_uri(child_cert.uri) + c.cert = child_cert.cert + rc.certs.append(c) + rc.issuer = ca_detail.latest_ca_cert + r_msg.payload.classes.append(rc) + + callback() + + self.gctx.irdb_query_child_resources(child.self.self_handle, child.child_handle, handle, errback) + + @classmethod + def query(cls, parent, cb, eb): + """ + Send a "list" query to parent. + """ + try: + rpki.log.info('Sending "list" request to parent %s' % parent.parent_handle) + parent.query_up_down(cls(), cb, eb) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + eb(e) + +class class_response_syntax(base_elt): + """ + Syntax for Up-Down protocol "list_response" and "issue_response" PDUs. + """ + + def __init__(self): + """ + Initialize class_response_syntax. + """ + base_elt.__init__(self) + self.classes = [] + + def startElement(self, stack, name, attrs): + """ + Handle "list_response" and "issue_response" PDUs. + """ + assert name == "class", "Unexpected name %s, stack %s" % (name, stack) + c = class_elt() + self.classes.append(c) + stack.append(c) + c.startElement(stack, name, attrs) + + def toXML(self): + """Generate payload of "list_response" and "issue_response" PDUs.""" + return [c.toXML() for c in self.classes] + +class list_response_pdu(class_response_syntax): + """ + Up-Down protocol "list_response" PDU. + """ + pass + +class issue_pdu(base_elt): + """ + Up-Down protocol "issue" PDU. + """ + + def startElement(self, stack, name, attrs): + """ + Handle "issue" PDU. + """ + assert name == "request", "Unexpected name %s, stack %s" % (name, stack) + self.class_name = attrs["class_name"] + self.req_resource_set_as = rpki.resource_set.resource_set_as(attrs.get("req_resource_set_as")) + self.req_resource_set_ipv4 = rpki.resource_set.resource_set_ipv4(attrs.get("req_resource_set_ipv4")) + self.req_resource_set_ipv6 = rpki.resource_set.resource_set_ipv6(attrs.get("req_resource_set_ipv6")) + + def endElement(self, stack, name, text): + """ + Handle "issue" PDU. + """ + assert name == "request", "Unexpected name %s, stack %s" % (name, stack) + self.pkcs10 = rpki.x509.PKCS10(Base64 = text) + stack.pop() + + def toXML(self): + """ + Generate payload of "issue" PDU. + """ + elt = self.make_elt("request", "class_name", "req_resource_set_as", + "req_resource_set_ipv4", "req_resource_set_ipv6") + elt.text = self.pkcs10.get_Base64() + return [elt] + + def serve_pdu(self, q_msg, r_msg, child, callback, errback): + """ + Serve one issue request PDU. 
+ """ + + # Subsetting not yet implemented, this is the one place where we + # have to handle it, by reporting that we're lame. + + if self.req_resource_set_as or \ + self.req_resource_set_ipv4 or \ + self.req_resource_set_ipv6: + raise rpki.exceptions.NotImplementedYet("req_* attributes not implemented yet, sorry") + + # Check the request + self.pkcs10.check_valid_request_ca() + ca = child.ca_from_class_name(self.class_name) + ca_detail = ca.active_ca_detail + if ca_detail is None: + raise rpki.exceptions.NoActiveCA("No active CA for class %r" % self.class_name) + + # Check current cert, if any + + def got_resources(irdb_resources): + + if irdb_resources.valid_until < rpki.sundial.now(): + raise rpki.exceptions.IRDBExpired("IRDB entry for child %s expired %s" % ( + child.child_handle, irdb_resources.valid_until)) + + resources = irdb_resources & ca_detail.latest_ca_cert.get_3779resources() + resources.valid_until = irdb_resources.valid_until + req_key = self.pkcs10.getPublicKey() + req_sia = self.pkcs10.get_SIA() + child_cert = child.fetch_child_certs(ca_detail = ca_detail, ski = req_key.get_SKI(), unique = True) + + # Generate new cert or regenerate old one if necessary + + publisher = rpki.rpkid.publication_queue() + + if child_cert is None: + child_cert = ca_detail.issue( + ca = ca, + child = child, + subject_key = req_key, + sia = req_sia, + resources = resources, + publisher = publisher) + else: + child_cert = child_cert.reissue( + ca_detail = ca_detail, + sia = req_sia, + resources = resources, + publisher = publisher) + + def done(): + c = certificate_elt() + c.cert_url = multi_uri(child_cert.uri) + c.cert = child_cert.cert + rc = class_elt() + rc.class_name = self.class_name + rc.cert_url = multi_uri(ca_detail.ca_cert_uri) + rc.from_resource_bag(resources) + rc.certs.append(c) + rc.issuer = ca_detail.latest_ca_cert + r_msg.payload = issue_response_pdu() + r_msg.payload.classes.append(rc) + callback() + + self.gctx.sql.sweep() + assert child_cert and child_cert.sql_in_db + publisher.call_pubd(done, errback) + + self.gctx.irdb_query_child_resources(child.self.self_handle, child.child_handle, got_resources, errback) + + @classmethod + def query(cls, parent, ca, ca_detail, callback, errback): + """ + Send an "issue" request to parent associated with ca. + """ + assert ca_detail is not None and ca_detail.state in ("pending", "active") + self = cls() + self.class_name = ca.parent_resource_class + self.pkcs10 = rpki.x509.PKCS10.create( + keypair = ca_detail.private_key_id, + is_ca = True, + caRepository = ca.sia_uri, + rpkiManifest = ca_detail.manifest_uri) + rpki.log.info('Sending "issue" request to parent %s' % parent.parent_handle) + parent.query_up_down(self, callback, errback) + +class issue_response_pdu(class_response_syntax): + """ + Up-Down protocol "issue_response" PDU. + """ + + def check_response(self): + """ + Check whether this looks like a reasonable issue_response PDU. + XML schema should be tighter for this response. + """ + if len(self.classes) != 1 or len(self.classes[0].certs) != 1: + raise rpki.exceptions.BadIssueResponse + +class revoke_syntax(base_elt): + """ + Syntax for Up-Down protocol "revoke" and "revoke_response" PDUs. + """ + + def startElement(self, stack, name, attrs): + """Handle "revoke" PDU.""" + self.class_name = attrs["class_name"] + self.ski = attrs["ski"] + + def toXML(self): + """Generate payload of "revoke" PDU.""" + return [self.make_elt("key", "class_name", "ski")] + +class revoke_pdu(revoke_syntax): + """ + Up-Down protocol "revoke" PDU. 
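The check_response() hook defined above for issue_response_pdu enforces the "exactly one class holding exactly one certificate" rule; a minimal sketch of what a caller sees when a response is malformed (assuming the rpki package is importable):

    import rpki.exceptions
    from rpki.up_down import issue_response_pdu

    r = issue_response_pdu()          # empty response: no <class/> elements yet
    try:
        r.check_response()
    except rpki.exceptions.BadIssueResponse:
        print "rejected: issue_response must carry exactly one class with exactly one certificate"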
+ """ + + def get_SKI(self): + """ + Convert g(SKI) encoding from PDU back to raw SKI. + """ + return base64.urlsafe_b64decode(self.ski + "=") + + def serve_pdu(self, q_msg, r_msg, child, cb, eb): + """ + Serve one revoke request PDU. + """ + + def done(): + r_msg.payload = revoke_response_pdu() + r_msg.payload.class_name = self.class_name + r_msg.payload.ski = self.ski + cb() + + ca = child.ca_from_class_name(self.class_name) + publisher = rpki.rpkid.publication_queue() + for ca_detail in ca.ca_details: + for child_cert in child.fetch_child_certs(ca_detail = ca_detail, ski = self.get_SKI()): + child_cert.revoke(publisher = publisher) + self.gctx.sql.sweep() + publisher.call_pubd(done, eb) + + @classmethod + def query(cls, ca, gski, cb, eb): + """ + Send a "revoke" request for certificate(s) named by gski to parent associated with ca. + """ + parent = ca.parent + self = cls() + self.class_name = ca.parent_resource_class + self.ski = gski + rpki.log.info('Sending "revoke" request for SKI %s to parent %s' % (gski, parent.parent_handle)) + parent.query_up_down(self, cb, eb) + +class revoke_response_pdu(revoke_syntax): + """ + Up-Down protocol "revoke_response" PDU. + """ + + pass + +class error_response_pdu(base_elt): + """ + Up-Down protocol "error_response" PDU. + """ + + codes = { + 1101 : "Already processing request", + 1102 : "Version number error", + 1103 : "Unrecognised request type", + 1201 : "Request - no such resource class", + 1202 : "Request - no resources allocated in resource class", + 1203 : "Request - badly formed certificate request", + 1301 : "Revoke - no such resource class", + 1302 : "Revoke - no such key", + 2001 : "Internal Server Error - Request not performed" } + + exceptions = { + rpki.exceptions.NoActiveCA : 1202, + (rpki.exceptions.ClassNameUnknown, revoke_pdu) : 1301, + rpki.exceptions.ClassNameUnknown : 1201, + (rpki.exceptions.NotInDatabase, revoke_pdu) : 1302 } + + def __init__(self, exception = None, request_payload = None): + """ + Initialize an error_response PDU from an exception object. + """ + base_elt.__init__(self) + if exception is not None: + rpki.log.debug("Constructing up-down error response from exception %s" % exception) + exception_type = type(exception) + request_type = None if request_payload is None else type(request_payload) + rpki.log.debug("Constructing up-down error response: exception_type %s, request_type %s" % ( + exception_type, request_type)) + if False: + self.status = self.exceptions.get((exception_type, request_type), + self.exceptions.get(exception_type, + 2001)) + else: + self.status = self.exceptions.get((exception_type, request_type)) + if self.status is None: + rpki.log.debug("No request-type-specific match, trying exception match") + self.status = self.exceptions.get(exception_type) + if self.status is None: + rpki.log.debug("No exception match either, defaulting") + self.status = 2001 + self.description = str(exception) + rpki.log.debug("Chosen status code: %s" % self.status) + + def endElement(self, stack, name, text): + """ + Handle "error_response" PDU. + """ + if name == "status": + code = int(text) + if code not in self.codes: + raise rpki.exceptions.BadStatusCode("%s is not a known status code" % code) + self.status = code + elif name == "description": + self.description = text + else: + assert name == "message", "Unexpected name %s, stack %s" % (name, stack) + stack.pop() + stack[-1].endElement(stack, name, text) + + def toXML(self): + """ + Generate payload of "error_response" PDU. 
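The ski attribute carried in revoke PDUs is the g(SKI) form: URL-safe Base64 of the 20-byte SKI with trailing padding stripped, which get_SKI() above undoes. A self-contained sketch (the key material is a stand-in digest):

    import base64, hashlib

    ski  = hashlib.sha1("stand-in for a public key's DER encoding").digest()   # 20 bytes
    gski = base64.urlsafe_b64encode(ski).rstrip("=")
    assert base64.urlsafe_b64decode(gski + "=") == ski    # one "=" repads a 20-byte value
    print gski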
+ """ + assert self.status in self.codes + elt = self.make_elt("status") + elt.text = str(self.status) + payload = [elt] + if self.description: + elt = self.make_elt("description") + elt.text = str(self.description) + elt.set("{http://www.w3.org/XML/1998/namespace}lang", "en-US") + payload.append(elt) + return payload + + def check_response(self): + """ + Handle an error response. For now, just raise an exception, + perhaps figure out something more clever to do later. + """ + raise rpki.exceptions.UpstreamError(self.codes[self.status]) + +class message_pdu(base_elt): + """ + Up-Down protocol message wrapper PDU. + """ + + version = 1 + + name2type = { + "list" : list_pdu, + "list_response" : list_response_pdu, + "issue" : issue_pdu, + "issue_response" : issue_response_pdu, + "revoke" : revoke_pdu, + "revoke_response" : revoke_response_pdu, + "error_response" : error_response_pdu } + + type2name = dict((v, k) for k, v in name2type.items()) + + error_pdu_type = error_response_pdu + + def toXML(self): + """ + Generate payload of message PDU. + """ + elt = self.make_elt("message", "version", "sender", "recipient", "type") + elt.extend(self.payload.toXML()) + return elt + + def startElement(self, stack, name, attrs): + """ + Handle message PDU. + + Payload of the <message/> element varies depending on the "type" + attribute, so after some basic checks we have to instantiate the + right class object to handle whatever kind of PDU this is. + """ + assert name == "message", "Unexpected name %s, stack %s" % (name, stack) + assert self.version == int(attrs["version"]) + self.sender = attrs["sender"] + self.recipient = attrs["recipient"] + self.type = attrs["type"] + self.payload = self.name2type[attrs["type"]]() + stack.append(self.payload) + + def __str__(self): + """ + Convert a message PDU to a string. + """ + return lxml.etree.tostring(self.toXML(), pretty_print = True, encoding = "UTF-8") + + def serve_top_level(self, child, callback): + """ + Serve one message request PDU. + """ + + r_msg = message_pdu() + r_msg.sender = self.recipient + r_msg.recipient = self.sender + + def done(): + r_msg.type = self.type2name[type(r_msg.payload)] + callback(r_msg) + + def lose(e): + rpki.log.traceback() + callback(self.serve_error(e)) + + try: + self.log_query(child) + self.payload.serve_pdu(self, r_msg, child, done, lose) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + lose(e) + + def log_query(self, child): + """ + Log query we're handling. Separate method so rootd can override. + """ + rpki.log.info("Serving %s query from child %s [sender %s, recipient %s]" % (self.type, child.child_handle, self.sender, self.recipient)) + + def serve_error(self, exception): + """ + Generate an error_response message PDU. + """ + r_msg = message_pdu() + r_msg.sender = self.recipient + r_msg.recipient = self.sender + r_msg.payload = self.error_pdu_type(exception, self.payload) + r_msg.type = self.type2name[type(r_msg.payload)] + return r_msg + + @classmethod + def make_query(cls, payload, sender, recipient): + """ + Construct one message PDU. + """ + assert not cls.type2name[type(payload)].endswith("_response") + if sender is None: + sender = "tweedledee" + if recipient is None: + recipient = "tweedledum" + self = cls() + self.sender = sender + self.recipient = recipient + self.payload = payload + self.type = self.type2name[type(payload)] + return self + +class sax_handler(rpki.xml_utils.sax_handler): + """ + SAX handler for Up-Down protocol. 
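Queries are normally built through message_pdu.make_query() above and then CMS-wrapped by cms_msg; a bare-bones sketch of just the XML envelope (handles are invented, no CMS signing here):

    from rpki.up_down import message_pdu, list_pdu

    q = message_pdu.make_query(payload = list_pdu(),
                               sender = "child-handle",
                               recipient = "parent-handle")
    print q        # __str__() serializes the <message/> element as XML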
+ """ + + pdu = message_pdu + name = "message" + version = "1" + +class cms_msg(rpki.x509.XML_CMS_object): + """ + Class to hold a CMS-signed up-down PDU. + """ + + encoding = "UTF-8" + schema = rpki.relaxng.up_down + saxify = sax_handler.saxify + allow_extra_certs = True + allow_extra_crls = True diff --git a/rpki/x509.py b/rpki/x509.py new file mode 100644 index 00000000..fb1a5a2b --- /dev/null +++ b/rpki/x509.py @@ -0,0 +1,2031 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +One X.509 implementation to rule them all... + +...and in the darkness hide the twisty maze of partially-overlapping +X.509 support packages in Python. + +Once upon a time we were using four separate partially-overlapping +implementions of X.509 and related protocols. Over the years this has +collapsed down to one, but the interface module we built on top of the +previous mess has itself become heavily embedded in the code base. So +this is a bit more complicated (not to mention baroque) than one might +expect for a module that had grown in a saner fashion. We clean up +bits of it from time to time. Some day this may all make sense. +""" + +import rpki.POW +import base64 +import lxml.etree +import os +import subprocess +import email.mime.application +import email.utils +import mailbox +import time +import rpki.exceptions +import rpki.resource_set +import rpki.oids +import rpki.sundial +import rpki.log +import rpki.async +import rpki.relaxng + +def base64_with_linebreaks(der): + """ + Encode DER (really, anything) as Base64 text, with linebreaks to + keep the result (sort of) readable. + """ + b = base64.b64encode(der) + n = len(b) + return "\n" + "\n".join(b[i : min(i + 64, n)] for i in xrange(0, n, 64)) + "\n" + +def looks_like_PEM(text): + """ + Guess whether text looks like a PEM encoding. + """ + + i = text.find("-----BEGIN ") + return i >= 0 and text.find("\n-----END ", i) > i + +def first_rsync_uri(xia): + """ + Find first rsync URI in a sequence of AIA or SIA URIs. + Returns the URI if found, otherwise None. + """ + + if xia is not None: + for uri in xia: + if uri.startswith("rsync://"): + return uri + return None + +class X501DN(object): + """ + Class to hold an X.501 Distinguished Name. + + This is nothing like a complete implementation, just enough for our + purposes. See RFC 5280 4.1.2.4 for the ASN.1 details. In brief: + + - A DN is a SEQUENCE OF RDNs. + + - A RDN is a SET OF AttributeAndValues; in practice, multi-value + RDNs are rare, so an RDN is almost always a set with a single + element. 
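The two small helpers above behave like this (inputs are invented; assumes rpki.x509 is importable):

    from rpki.x509 import looks_like_PEM, first_rsync_uri

    pem = "-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----\n"
    print looks_like_PEM(pem)               # True
    print looks_like_PEM("\x30\x82...")     # False: DER, not PEM
    print first_rsync_uri(("http://example.org/ta.cer",
                           "rsync://example.org/ta.cer"))    # the rsync URI
    print first_rsync_uri(None)             # None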
+ + - An AttributeAndValue is a SEQUENCE consisting of a OID and a + value, where a whole bunch of things including both syntax and + semantics of the value are determined by the OID. + + - The value is some kind of ASN.1 string; there are far too many + encoding options options, most of which are either strongly + discouraged or outright forbidden by the PKIX profile, but which + persist for historical reasons. The only ones PKIX actually + likes are PrintableString and UTF8String, but there are nuances + and special cases where some of the others are required. + + The RPKI profile further restricts DNs to a single mandatory + CommonName attribute with a single optional SerialNumber attribute + (not to be confused with the certificate serial number). + + BPKI certificates should (we hope) follow the general PKIX guideline + but the ones we construct ourselves are likely to be relatively + simple. + """ + + def __str__(self): + return "".join("/" + "+".join("%s=%s" % (rpki.oids.oid2name(a[0]), a[1]) + for a in rdn) + for rdn in self.dn) + + def __cmp__(self, other): + return cmp(self.dn, other.dn) + + def __repr__(self): + return rpki.log.log_repr(self, str(self)) + + def _debug(self): + if False: + import traceback + for chunk in traceback.format_stack(limit = 5): + for line in chunk.splitlines(): + rpki.log.debug("== %s" % line) + rpki.log.debug("++ %r %r" % (self, self.dn)) + + @classmethod + def from_cn(cls, cn, sn = None): + assert isinstance(cn, (str, unicode)) + if isinstance(sn, (int, long)): + sn = "%08X" % sn + elif isinstance(sn, (str, unicode)): + assert all(c in "0123456789abcdefABCDEF" for c in sn) + sn = str(sn) + self = cls() + if sn is not None: + self.dn = (((rpki.oids.commonName, cn),), ((rpki.oids.serialNumber, sn),)) + else: + self.dn = (((rpki.oids.commonName, cn),),) + return self + + @classmethod + def from_POW(cls, t): + assert isinstance(t, tuple) + self = cls() + self.dn = t + return self + + def get_POW(self): + return self.dn + + def extract_cn_and_sn(self): + cn = None + sn = None + + for rdn in self.dn: + if len(rdn) == 1 and len(rdn[0]) == 2: + oid = rdn[0][0] + val = rdn[0][1] + if oid == rpki.oids.commonName and cn is None: + cn = val + continue + if oid == rpki.oids.serialNumber and sn is None: + sn = val + continue + raise rpki.exceptions.BadX510DN("Bad subject name: %s" % (self.dn,)) + + if cn is None: + raise rpki.exceptions.BadX510DN("Subject name is missing CN: %s" % (self.dn,)) + + return cn, sn + + +class DER_object(object): + """ + Virtual class to hold a generic DER object. + """ + + ## @var formats + # Formats supported in this object. This is kind of redundant now + # that we're down to a single ASN.1 package and everything supports + # the same DER and POW formats, it's mostly historical baggage from + # the days when we had three different ASN.1 encoders, each with its + # own low-level Python object format. Clean up, some day. + formats = ("DER", "POW") + + ## @var POW_class + # Class of underlying POW object. Concrete subclasses must supply this. + POW_class = None + + ## Other attributes that self.clear() should whack. + other_clear = () + + ## @var DER + # DER value of this object + DER = None + + ## @var failure_threshold + # Rate-limiting interval between whines about Auto_update objects. + failure_threshold = rpki.sundial.timedelta(minutes = 5) + + def empty(self): + """ + Test whether this object is empty. + """ + return all(getattr(self, a, None) is None for a in self.formats) + + def clear(self): + """ + Make this object empty. 
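A short sketch of the X501DN helper above: RPKI subject names are just a commonName plus an optional serialNumber, and from_cn()/extract_cn_and_sn() round-trip them (the values here are arbitrary):

    from rpki.x509 import X501DN

    dn = X501DN.from_cn("3ABF2CF3A66D0D8E6A2D72B0F3B7B39C1B0E9F21", sn = 42)
    print dn                        # roughly "/commonName=3ABF.../serialNumber=0000002A", via rpki.oids names
    print dn.extract_cn_and_sn()    # ("3ABF...", "0000002A") -- an integer sn is formatted %08X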
+ """ + for a in self.formats + self.other_clear: + setattr(self, a, None) + self.filename = None + self.timestamp = None + self.lastfail = None + + def __init__(self, **kw): + """ + Initialize a DER_object. + """ + self.clear() + if len(kw): + self.set(**kw) + + def set(self, **kw): + """ + Set this object by setting one of its known formats. + + This method only allows one to set one format at a time. + Subsequent calls will clear the object first. The point of all + this is to let the object's internal converters handle mustering + the object into whatever format you need at the moment. + """ + + if len(kw) == 1: + name = kw.keys()[0] + if name in self.formats: + self.clear() + setattr(self, name, kw[name]) + return + if name == "PEM": + self.clear() + self._set_PEM(kw[name]) + return + if name == "Base64": + self.clear() + self.DER = base64.b64decode(kw[name]) + return + if name == "Auto_update": + self.filename = kw[name] + self.check_auto_update() + return + if name in ("PEM_file", "DER_file", "Auto_file"): + f = open(kw[name], "rb") + value = f.read() + f.close() + self.clear() + if name == "PEM_file" or (name == "Auto_file" and looks_like_PEM(value)): + self._set_PEM(value) + else: + self.DER = value + return + raise rpki.exceptions.DERObjectConversionError("Can't honor conversion request %r" % (kw,)) + + def check_auto_update(self): + """ + Check for updates to a DER object that auto-updates from a file. + """ + if self.filename is None: + return + try: + filename = self.filename + timestamp = os.stat(self.filename).st_mtime + if self.timestamp is None or self.timestamp < timestamp: + rpki.log.debug("Updating %s, timestamp %s" % (filename, rpki.sundial.datetime.fromtimestamp(timestamp))) + f = open(filename, "rb") + value = f.read() + f.close() + self.clear() + if looks_like_PEM(value): + self._set_PEM(value) + else: + self.DER = value + self.filename = filename + self.timestamp = timestamp + except (IOError, OSError), e: + now = rpki.sundial.now() + if self.lastfail is None or now > self.lastfail + self.failure_threshold: + rpki.log.warn("Could not auto_update %r (last failure %s): %s" % (self, self.lastfail, e)) + self.lastfail = now + else: + self.lastfail = None + + def check(self): + """ + Perform basic checks on a DER object. + """ + self.check_auto_update() + assert not self.empty() + + def _set_PEM(self, pem): + """ + Set the POW value of this object based on a PEM input value. + Subclasses may need to override this. + """ + assert self.empty() + self.POW = self.POW_class.pemRead(pem) + + def get_DER(self): + """ + Get the DER value of this object. + Subclasses may need to override this method. + """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWrite() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this object. + Subclasses may need to override this method. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = self.POW_class.derRead(self.get_DER()) + return self.POW + + def get_Base64(self): + """ + Get the Base64 encoding of the DER value of this object. + """ + return base64_with_linebreaks(self.get_DER()) + + def get_PEM(self): + """ + Get the PEM representation of this object. + """ + return self.get_POW().pemWrite() + + def __cmp__(self, other): + """ + Compare two DER-encoded objects. 
+ """ + if self is None and other is None: + return 0 + elif self is None: + return -1 + elif other is None: + return 1 + elif isinstance(other, str): + return cmp(self.get_DER(), other) + else: + return cmp(self.get_DER(), other.get_DER()) + + def hSKI(self): + """ + Return hexadecimal string representation of SKI for this object. + Only work for subclasses that implement get_SKI(). + """ + ski = self.get_SKI() + return ":".join(("%02X" % ord(i) for i in ski)) if ski else "" + + def gSKI(self): + """ + Calculate g(SKI) for this object. Only work for subclasses + that implement get_SKI(). + """ + return base64.urlsafe_b64encode(self.get_SKI()).rstrip("=") + + def hAKI(self): + """ + Return hexadecimal string representation of AKI for this + object. Only work for subclasses that implement get_AKI(). + """ + aki = self.get_AKI() + return ":".join(("%02X" % ord(i) for i in aki)) if aki else "" + + def gAKI(self): + """ + Calculate g(AKI) for this object. Only work for subclasses + that implement get_AKI(). + """ + return base64.urlsafe_b64encode(self.get_AKI()).rstrip("=") + + def get_AKI(self): + """ + Get the AKI extension from this object, if supported. + """ + return self.get_POW().getAKI() + + def get_SKI(self): + """ + Get the SKI extension from this object, if supported. + """ + return self.get_POW().getSKI() + + def get_EKU(self): + """ + Get the Extended Key Usage extension from this object, if supported. + """ + return self.get_POW().getEKU() + + def get_SIA(self): + """ + Get the SIA extension from this object. Only works for subclasses + that support getSIA(). + """ + return self.get_POW().getSIA() + + def get_sia_directory_uri(self): + """ + Get SIA directory (id-ad-caRepository) URI from this object. + Only works for subclasses that support getSIA(). + """ + sia = self.get_POW().getSIA() + return None if sia is None else first_rsync_uri(sia[0]) + + def get_sia_manifest_uri(self): + """ + Get SIA manifest (id-ad-rpkiManifest) URI from this object. + Only works for subclasses that support getSIA(). + """ + sia = self.get_POW().getSIA() + return None if sia is None else first_rsync_uri(sia[1]) + + def get_sia_object_uri(self): + """ + Get SIA object (id-ad-signedObject) URI from this object. + Only works for subclasses that support getSIA(). + """ + sia = self.get_POW().getSIA() + return None if sia is None else first_rsync_uri(sia[2]) + + def get_AIA(self): + """ + Get the SIA extension from this object. Only works for subclasses + that support getAIA(). + """ + return self.get_POW().getAIA() + + def get_aia_uri(self): + """ + Get AIA (id-ad-caIssuers) URI from this object. + Only works for subclasses that support getAIA(). + """ + return first_rsync_uri(self.get_POW().getAIA()) + + def get_basicConstraints(self): + """ + Get the basicConstraints extension from this object. Only works + for subclasses that support getExtension(). + """ + return self.get_POW().getBasicConstraints() + + def is_CA(self): + """ + Return True if and only if object has the basicConstraints + extension and its cA value is true. + """ + basicConstraints = self.get_basicConstraints() + return basicConstraints is not None and basicConstraints[0] + + def get_3779resources(self): + """ + Get RFC 3779 resources as rpki.resource_set objects. 
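Pulling the DER_object pieces together: one object accepts several input formats, converts lazily between them, and the accessor wrappers above expose the common extensions. A sketch, where "router.cer" is a hypothetical RPKI certificate file:

    import rpki.x509

    cert = rpki.x509.X509(Auto_file = "router.cer")   # PEM or DER, sniffed via looks_like_PEM()
    print cert.get_Base64()                           # Base64 of the DER, wrapped for readability
    print cert.hSKI()                                 # colon-separated hex SKI
    print cert.is_CA()                                # basicConstraints cA flag
    print cert.get_sia_manifest_uri()                 # first rsync id-ad-rpkiManifest URI, if any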
+ """ + resources = rpki.resource_set.resource_bag.from_POW_rfc3779(self.get_POW().getRFC3779()) + try: + resources.valid_until = self.getNotAfter() + except AttributeError: + pass + return resources + + @classmethod + def from_sql(cls, x): + """ + Convert from SQL storage format. + """ + return cls(DER = x) + + def to_sql(self): + """ + Convert to SQL storage format. + """ + return self.get_DER() + + def dumpasn1(self): + """ + Pretty print an ASN.1 DER object using cryptlib dumpasn1 tool. + Use a temporary file rather than popen4() because dumpasn1 uses + seek() when decoding ASN.1 content nested in OCTET STRING values. + """ + + ret = None + fn = "dumpasn1.%d.tmp" % os.getpid() + try: + f = open(fn, "wb") + f.write(self.get_DER()) + f.close() + p = subprocess.Popen(("dumpasn1", "-a", fn), stdout = subprocess.PIPE, stderr = subprocess.STDOUT) + ret = "\n".join(x for x in p.communicate()[0].splitlines() if x.startswith(" ")) + except Exception, e: + ret = "[Could not run dumpasn1: %s]" % e + finally: + os.unlink(fn) + return ret + + def tracking_data(self, uri): + """ + Return a string containing data we want to log when tracking how + objects move through the RPKI system. Subclasses may wrap this to + provide more information, but should make sure to include at least + this information at the start of the tracking line. + """ + try: + d = rpki.POW.Digest(rpki.POW.SHA1_DIGEST) + d.update(self.get_DER()) + return "%s %s %s" % (uri, self.creation_timestamp, + "".join(("%02X" % ord(b) for b in d.digest()))) + except: # pylint: disable=W0702 + return uri + + def __getstate__(self): + """ + Pickling protocol -- pickle the DER encoding. + """ + return self.get_DER() + + def __setstate__(self, state): + """ + Pickling protocol -- unpickle the DER encoding. + """ + self.set(DER = state) + +class X509(DER_object): + """ + X.509 certificates. + + This class is designed to hold all the different representations of + X.509 certs we're using and convert between them. X.509 support in + Python a nasty maze of half-cooked stuff (except perhaps for + cryptlib, which is just different). Users of this module should not + have to care about this implementation nightmare. + """ + + POW_class = rpki.POW.X509 + + def getIssuer(self): + """ + Get the issuer of this certificate. + """ + return X501DN.from_POW(self.get_POW().getIssuer()) + + def getSubject(self): + """ + Get the subject of this certificate. + """ + return X501DN.from_POW(self.get_POW().getSubject()) + + def getNotBefore(self): + """ + Get the inception time of this certificate. + """ + return self.get_POW().getNotBefore() + + def getNotAfter(self): + """ + Get the expiration time of this certificate. + """ + return self.get_POW().getNotAfter() + + def getSerial(self): + """ + Get the serial number of this certificate. + """ + return self.get_POW().getSerial() + + def getPublicKey(self): + """ + Extract the public key from this certificate. + """ + return PublicKey(POW = self.get_POW().getPublicKey()) + + def get_SKI(self): + """ + Get the SKI extension from this object. + """ + return self.get_POW().getSKI() + + def expired(self): + """ + Test whether this certificate has expired. + """ + return self.getNotAfter() <= rpki.sundial.now() + + def issue(self, keypair, subject_key, serial, sia, aia, crldp, notAfter, + cn = None, resources = None, is_ca = True, notBefore = None, + sn = None, eku = None): + """ + Issue an RPKI certificate. 
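The issue() method whose docstring begins above is the entry point used when a CA certificate signs a subordinate certificate. A hedged sketch of the call shape: the helper name, URIs and validity period are invented, and the key, serial and resource arguments are assumed to be supplied by the caller rather than constructed here:

    import rpki.sundial

    def issue_child_ca(ca_cert, ca_key, child_pubkey, serial, resources):
        # ca_cert is an rpki.x509.X509, ca_key/child_pubkey are rpki.x509 key
        # objects, resources an rpki.resource_set.resource_bag -- all assumed.
        return ca_cert.issue(
            keypair     = ca_key,
            subject_key = child_pubkey,
            serial      = serial,
            sia         = ("rsync://example.org/repo/child/",          # id-ad-caRepository
                           "rsync://example.org/repo/child/child.mft", # id-ad-rpkiManifest
                           None),                                      # no id-ad-signedObject for a CA
            aia         = "rsync://example.org/repo/parent.cer",
            crldp       = "rsync://example.org/repo/parent.crl",
            resources   = resources,
            notAfter    = rpki.sundial.now() + rpki.sundial.timedelta.parse("1Y"),
            is_ca       = True)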
+ """ + + assert aia is not None and crldp is not None + + assert eku is None or not is_ca + + return self._issue( + keypair = keypair, + subject_key = subject_key, + serial = serial, + sia = sia, + aia = aia, + crldp = crldp, + notBefore = notBefore, + notAfter = notAfter, + cn = cn, + sn = sn, + resources = resources, + is_ca = is_ca, + aki = self.get_SKI(), + issuer_name = self.getSubject(), + eku = eku) + + + @classmethod + def self_certify(cls, keypair, subject_key, serial, sia, notAfter, + cn = None, resources = None, notBefore = None, + sn = None): + """ + Generate a self-certified RPKI certificate. + """ + + ski = subject_key.get_SKI() + + if cn is None: + cn = "".join(("%02X" % ord(i) for i in ski)) + + return cls._issue( + keypair = keypair, + subject_key = subject_key, + serial = serial, + sia = sia, + aia = None, + crldp = None, + notBefore = notBefore, + notAfter = notAfter, + cn = cn, + sn = sn, + resources = resources, + is_ca = True, + aki = ski, + issuer_name = X501DN.from_cn(cn, sn), + eku = None) + + + @classmethod + def _issue(cls, keypair, subject_key, serial, sia, aia, crldp, notAfter, + cn, sn, resources, is_ca, aki, issuer_name, notBefore, eku): + """ + Common code to issue an RPKI certificate. + """ + + now = rpki.sundial.now() + ski = subject_key.get_SKI() + + if notBefore is None: + notBefore = now + + if cn is None: + cn = "".join(("%02X" % ord(i) for i in ski)) + + if now >= notAfter: + raise rpki.exceptions.PastNotAfter("notAfter value %s is already in the past" % notAfter) + + if notBefore >= notAfter: + raise rpki.exceptions.NullValidityInterval("notAfter value %s predates notBefore value %s" % + (notAfter, notBefore)) + + cert = rpki.POW.X509() + + cert.setVersion(2) + cert.setSerial(serial) + cert.setIssuer(issuer_name.get_POW()) + cert.setSubject(X501DN.from_cn(cn, sn).get_POW()) + cert.setNotBefore(notBefore) + cert.setNotAfter(notAfter) + cert.setPublicKey(subject_key.get_POW()) + cert.setSKI(ski) + cert.setAKI(aki) + cert.setCertificatePolicies((rpki.oids.id_cp_ipAddr_asNumber,)) + + if crldp is not None: + cert.setCRLDP((crldp,)) + + if aia is not None: + cert.setAIA((aia,)) + + if is_ca: + cert.setBasicConstraints(True, None) + cert.setKeyUsage(frozenset(("keyCertSign", "cRLSign"))) + + else: + cert.setKeyUsage(frozenset(("digitalSignature",))) + + assert sia is not None or not is_ca + + if sia is not None: + caRepository, rpkiManifest, signedObject = sia + cert.setSIA( + (caRepository,) if isinstance(caRepository, str) else caRepository, + (rpkiManifest,) if isinstance(rpkiManifest, str) else rpkiManifest, + (signedObject,) if isinstance(signedObject, str) else signedObject) + + if resources is not None: + cert.setRFC3779( + asn = ("inherit" if resources.asn.inherit else + ((r.min, r.max) for r in resources.asn)), + ipv4 = ("inherit" if resources.v4.inherit else + ((r.min, r.max) for r in resources.v4)), + ipv6 = ("inherit" if resources.v6.inherit else + ((r.min, r.max) for r in resources.v6))) + + if eku is not None: + assert not is_ca + cert.setEKU(eku) + + cert.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST) + + return cls(POW = cert) + + def bpki_cross_certify(self, keypair, source_cert, serial, notAfter, + now = None, pathLenConstraint = 0): + """ + Issue a BPKI certificate with values taking from an existing certificate. 
+ """ + return self.bpki_certify( + keypair = keypair, + subject_name = source_cert.getSubject(), + subject_key = source_cert.getPublicKey(), + serial = serial, + notAfter = notAfter, + now = now, + pathLenConstraint = pathLenConstraint, + is_ca = True) + + @classmethod + def bpki_self_certify(cls, keypair, subject_name, serial, notAfter, + now = None, pathLenConstraint = None): + """ + Issue a self-signed BPKI CA certificate. + """ + return cls._bpki_certify( + keypair = keypair, + issuer_name = subject_name, + subject_name = subject_name, + subject_key = keypair.get_public(), + serial = serial, + now = now, + notAfter = notAfter, + pathLenConstraint = pathLenConstraint, + is_ca = True) + + def bpki_certify(self, keypair, subject_name, subject_key, serial, notAfter, is_ca, + now = None, pathLenConstraint = None): + """ + Issue a normal BPKI certificate. + """ + assert keypair.get_public() == self.getPublicKey() + return self._bpki_certify( + keypair = keypair, + issuer_name = self.getSubject(), + subject_name = subject_name, + subject_key = subject_key, + serial = serial, + now = now, + notAfter = notAfter, + pathLenConstraint = pathLenConstraint, + is_ca = is_ca) + + @classmethod + def _bpki_certify(cls, keypair, issuer_name, subject_name, subject_key, + serial, now, notAfter, pathLenConstraint, is_ca): + """ + Issue a BPKI certificate. This internal method does the real + work, after one of the wrapper methods has extracted the relevant + fields. + """ + + if now is None: + now = rpki.sundial.now() + + issuer_key = keypair.get_public() + + assert (issuer_key == subject_key) == (issuer_name == subject_name) + assert is_ca or issuer_name != subject_name + assert is_ca or pathLenConstraint is None + assert pathLenConstraint is None or (isinstance(pathLenConstraint, (int, long)) and + pathLenConstraint >= 0) + + cert = rpki.POW.X509() + cert.setVersion(2) + cert.setSerial(serial) + cert.setIssuer(issuer_name.get_POW()) + cert.setSubject(subject_name.get_POW()) + cert.setNotBefore(now) + cert.setNotAfter(notAfter) + cert.setPublicKey(subject_key.get_POW()) + cert.setSKI(subject_key.get_POW().calculateSKI()) + if issuer_key != subject_key: + cert.setAKI(issuer_key.get_POW().calculateSKI()) + if is_ca: + cert.setBasicConstraints(True, pathLenConstraint) + cert.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST) + return cls(POW = cert) + + @classmethod + def normalize_chain(cls, chain): + """ + Normalize a chain of certificates into a tuple of X509 objects. + Given all the glue certificates needed for BPKI cross + certification, it's easiest to allow sloppy arguments to the CMS + validation methods and provide a single method that normalizes the + allowed cases. So this method allows X509, None, lists, and + tuples, and returns a tuple of X509 objects. + """ + if isinstance(chain, cls): + chain = (chain,) + return tuple(x for x in chain if x is not None) + + @property + def creation_timestamp(self): + """ + Time at which this object was created. + """ + return self.getNotBefore() + +class PKCS10(DER_object): + """ + Class to hold a PKCS #10 request. + """ + + POW_class = rpki.POW.PKCS10 + + ## @var expected_ca_keyUsage + # KeyUsage extension flags expected for CA requests. + + expected_ca_keyUsage = frozenset(("keyCertSign", "cRLSign")) + + ## @var allowed_extensions + # Extensions allowed by RPKI profile. 
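The BPKI helpers above chain together in the obvious way: bpki_self_certify() creates a trust anchor and bpki_certify() issues under it. A sketch; keypair and subject_pubkey are assumed to be existing rpki.x509 key objects, names and serials are illustrative:

    import rpki.x509, rpki.sundial

    def make_bpki_pair(keypair, subject_pubkey):
        notAfter = rpki.sundial.now() + rpki.sundial.timedelta.parse("10Y")
        ta = rpki.x509.X509.bpki_self_certify(
            keypair      = keypair,
            subject_name = rpki.x509.X501DN.from_cn("Example BPKI TA"),
            serial       = 1,
            notAfter     = notAfter)
        ee = ta.bpki_certify(
            keypair      = keypair,
            subject_name = rpki.x509.X501DN.from_cn("Example BPKI EE"),
            subject_key  = subject_pubkey,
            serial       = 2,
            notAfter     = notAfter,
            is_ca        = False)
        return ta, ee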
+ + allowed_extensions = frozenset((rpki.oids.basicConstraints, + rpki.oids.keyUsage, + rpki.oids.subjectInfoAccess, + rpki.oids.extendedKeyUsage)) + + + def get_DER(self): + """ + Get the DER value of this certification request. + """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWrite() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this certification request. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = rpki.POW.PKCS10.derRead(self.get_DER()) + return self.POW + + def getSubject(self): + """ + Extract the subject name from this certification request. + """ + return X501DN.from_POW(self.get_POW().getSubject()) + + def getPublicKey(self): + """ + Extract the public key from this certification request. + """ + return PublicKey(POW = self.get_POW().getPublicKey()) + + def get_SKI(self): + """ + Compute SKI for public key from this certification request. + """ + return self.getPublicKey().get_SKI() + + + def check_valid_request_common(self): + """ + Common code for checking this certification requests to see + whether they conform to the RPKI certificate profile. + + Throws an exception if the request isn't valid, so if this method + returns at all, the request is ok. + + You probably don't want to call this directly, as it only performs + the checks that are common to all RPKI certificates. + """ + + if not self.get_POW().verify(): + raise rpki.exceptions.BadPKCS10("PKCS #10 signature check failed") + + ver = self.get_POW().getVersion() + + if ver != 0: + raise rpki.exceptions.BadPKCS10("PKCS #10 request has bad version number %s" % ver) + + ku = self.get_POW().getKeyUsage() + + if ku is not None and self.expected_ca_keyUsage != ku: + raise rpki.exceptions.BadPKCS10("PKCS #10 keyUsage doesn't match profile: %r" % ku) + + forbidden_extensions = self.get_POW().getExtensionOIDs() - self.allowed_extensions + + if forbidden_extensions: + raise rpki.exceptions.BadExtension("Forbidden extension%s in PKCS #10 certificate request: %s" % ( + "" if len(forbidden_extensions) == 1 else "s", + ", ".join(forbidden_extensions))) + + + def check_valid_request_ca(self): + """ + Check this certification request to see whether it's a valid + request for an RPKI CA certificate. + + Throws an exception if the request isn't valid, so if this method + returns at all, the request is ok. 
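As a rough sketch (the URIs are placeholders), a conforming CA request built with the create() classmethod defined later in this class would pass this check:

    ca_req = rpki.x509.PKCS10.create(
        keypair      = rpki.x509.RSA.generate(quiet = True),
        is_ca        = True,
        caRepository = "rsync://repo.example.net/rpki/ca/",
        rpkiManifest = "rsync://repo.example.net/rpki/ca/ca.mft")
    ca_req.check_valid_request_ca()    # raises rpki.exceptions.BadPKCS10 on failure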
+ """ + + self.check_valid_request_common() + + alg = self.get_POW().getSignatureAlgorithm() + bc = self.get_POW().getBasicConstraints() + eku = self.get_POW().getEKU() + sias = self.get_POW().getSIA() + + if alg != rpki.oids.sha256WithRSAEncryption: + raise rpki.exceptions.BadPKCS10("PKCS #10 has bad signature algorithm for CA: %s" % alg) + + if bc is None or not bc[0] or bc[1] is not None: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA bad basicConstraints") + + if eku is not None: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA EKU not allowed") + + if sias is None: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA missing") + + caRepository, rpkiManifest, signedObject = sias + + if signedObject: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA must not have id-ad-signedObject") + + if not caRepository: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA must have id-ad-caRepository") + + if not any(uri.startswith("rsync://") for uri in caRepository): + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-caRepository contains no rsync URIs") + + if any(uri.startswith("rsync://") and not uri.endswith("/") for uri in caRepository): + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-caRepository does not end with slash") + + if not rpkiManifest: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA must have id-ad-rpkiManifest") + + if not any(uri.startswith("rsync://") for uri in rpkiManifest): + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-rpkiManifest contains no rsync URIs") + + if any(uri.startswith("rsync://") and uri.endswith("/") for uri in rpkiManifest): + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-rpkiManifest ends with slash") + + + def check_valid_request_ee(self): + """ + Check this certification request to see whether it's a valid + request for an RPKI EE certificate. + + Throws an exception if the request isn't valid, so if this method + returns at all, the request is ok. + + We're a bit less strict here than we are for either CA + certificates or BGPSEC router certificates, because the profile is + less tightly nailed down for unspecified-use RPKI EE certificates. + Future specific purposes may impose tighter constraints. + + Note that this method does NOT apply to so-called "infrastructure" + EE certificates (eg, the EE certificates embedded in manifests and + ROAs); those are constrained fairly tightly, but they're also + generated internally so we don't need to check them as user or + protocol input. 
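As a rough sketch (the signedObject URI is a placeholder, and rpki.POW.PKCS10.setSIA() is assumed to tolerate None for the unused accessMethods, as create()'s optional arguments suggest), a request this check would accept:

    ee_req = rpki.x509.PKCS10.create(
        keypair      = rpki.x509.RSA.generate(quiet = True),
        signedObject = "rsync://repo.example.net/rpki/obj/example.roa")
    ee_req.check_valid_request_ee()    # raises rpki.exceptions.BadPKCS10 on failure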
+ """ + + self.check_valid_request_common() + + alg = self.get_POW().getSignatureAlgorithm() + bc = self.get_POW().getBasicConstraints() + sia = self.get_POW().getSIA() + + caRepository, rpkiManifest, signedObject = sia or (None, None, None) + + if alg not in (rpki.oids.sha256WithRSAEncryption, rpki.oids.ecdsa_with_SHA256): + raise rpki.exceptions.BadPKCS10("PKCS #10 has bad signature algorithm for EE: %s" % alg) + + if bc is not None and (bc[0] or bc[1] is not None): + raise rpki.exceptions.BadPKCS10("PKCS #10 EE has bad basicConstraints") + + if caRepository: + raise rpki.exceptions.BadPKCS10("PKCS #10 EE must not have id-ad-caRepository") + + if rpkiManifest: + raise rpki.exceptions.BadPKCS10("PKCS #10 EE must not have id-ad-rpkiManifest") + + if signedObject and not any(uri.startswith("rsync://") for uri in signedObject): + raise rpki.exceptions.BadPKCS10("PKCS #10 EE SIA id-ad-signedObject contains no rsync URIs") + + + def check_valid_request_router(self): + """ + Check this certification request to see whether it's a valid + request for a BGPSEC router certificate. + + Throws an exception if the request isn't valid, so if this method + returns at all, the request is ok. + + draft-ietf-sidr-bgpsec-pki-profiles 3.2 says follow RFC 6487 3 + except where explicitly overriden, and does not override for SIA. + But draft-ietf-sidr-bgpsec-pki-profiles also says that router + certificates don't get SIA, while RFC 6487 requires SIA. So what + do we do with SIA in PKCS #10 for router certificates? + + For the moment, ignore it, but make sure we don't include it in + the certificate when we get to the code that generates that. + """ + + self.check_valid_request_ee() + + alg = self.get_POW().getSignatureAlgorithm() + eku = self.get_POW().getEKU() + + if alg != rpki.oids.ecdsa_with_SHA256: + raise rpki.exceptions.BadPKCS10("PKCS #10 has bad signature algorithm for router: %s" % alg) + + # Not really clear to me whether PKCS #10 should have EKU or not, so allow + # either, but insist that it be the right one if present. + + if eku is not None and rpki.oids.id_kp_bgpsec_router not in eku: + raise rpki.exceptions.BadPKCS10("PKCS #10 router must have EKU") + + + @classmethod + def create(cls, keypair, exts = None, is_ca = False, + caRepository = None, rpkiManifest = None, signedObject = None, + cn = None, sn = None, eku = None): + """ + Create a new request for a given keypair. + """ + + assert exts is None, "Old calling sequence to rpki.x509.PKCS10.create()" + + if cn is None: + cn = "".join(("%02X" % ord(i) for i in keypair.get_SKI())) + + if isinstance(caRepository, str): + caRepository = (caRepository,) + + if isinstance(rpkiManifest, str): + rpkiManifest = (rpkiManifest,) + + if isinstance(signedObject, str): + signedObject = (signedObject,) + + req = rpki.POW.PKCS10() + req.setVersion(0) + req.setSubject(X501DN.from_cn(cn, sn).get_POW()) + req.setPublicKey(keypair.get_POW()) + + if is_ca: + req.setBasicConstraints(True, None) + req.setKeyUsage(cls.expected_ca_keyUsage) + + if caRepository or rpkiManifest or signedObject: + req.setSIA(caRepository, rpkiManifest, signedObject) + + if eku: + req.setEKU(eku) + + req.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST) + return cls(POW = req) + +## @var generate_insecure_debug_only_rsa_key +# Debugging hack to let us save throwaway RSA keys from one debug +# session to the next. DO NOT USE THIS IN PRODUCTION. 
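# A rough sketch of how debugging code might enable the hack, using the generator
# class defined below; the database filename is a placeholder:
#
#   rpki.x509.generate_insecure_debug_only_rsa_key = \
#       rpki.x509.insecure_debug_only_rsa_key_generator("insecure-debug-keys.db")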
+ +generate_insecure_debug_only_rsa_key = None + +class insecure_debug_only_rsa_key_generator(object): + + def __init__(self, filename, keyno = 0): + try: + try: + import gdbm as dbm_du_jour + except ImportError: + import dbm as dbm_du_jour + self.keyno = long(keyno) + self.filename = filename + self.db = dbm_du_jour.open(filename, "c") + except: + rpki.log.warn("insecure_debug_only_rsa_key_generator initialization FAILED, hack inoperative") + raise + + def __call__(self): + k = str(self.keyno) + try: + v = rpki.POW.Asymmetric.derReadPrivate(self.db[k]) + except KeyError: + v = rpki.POW.Asymmetric.generateRSA(2048) + self.db[k] = v.derWritePrivate() + self.keyno += 1 + return v + + +class PrivateKey(DER_object): + """ + Class to hold a Public/Private key pair. + """ + + POW_class = rpki.POW.Asymmetric + + def get_DER(self): + """ + Get the DER value of this keypair. + """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWritePrivate() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this keypair. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = rpki.POW.Asymmetric.derReadPrivate(self.get_DER()) + return self.POW + + def get_PEM(self): + """ + Get the PEM representation of this keypair. + """ + return self.get_POW().pemWritePrivate() + + def _set_PEM(self, pem): + """ + Set the POW value of this keypair from a PEM string. + """ + assert self.empty() + self.POW = self.POW_class.pemReadPrivate(pem) + + def get_public_DER(self): + """ + Get the DER encoding of the public key from this keypair. + """ + return self.get_POW().derWritePublic() + + def get_SKI(self): + """ + Calculate the SKI of this keypair. + """ + return self.get_POW().calculateSKI() + + def get_public(self): + """ + Convert the public key of this keypair into a PublicKey object. + """ + return PublicKey(DER = self.get_public_DER()) + +class PublicKey(DER_object): + """ + Class to hold a public key. + """ + + POW_class = rpki.POW.Asymmetric + + def get_DER(self): + """ + Get the DER value of this public key. + """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWritePublic() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this public key. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = rpki.POW.Asymmetric.derReadPublic(self.get_DER()) + return self.POW + + def get_PEM(self): + """ + Get the PEM representation of this public key. + """ + return self.get_POW().pemWritePublic() + + def _set_PEM(self, pem): + """ + Set the POW value of this public key from a PEM string. + """ + assert self.empty() + self.POW = self.POW_class.pemReadPublic(pem) + + def get_SKI(self): + """ + Calculate the SKI of this public key. + """ + return self.get_POW().calculateSKI() + +class KeyParams(DER_object): + """ + Wrapper for OpenSSL's asymmetric key parameter classes. + """ + + POW_class = rpki.POW.AsymmetricParams + + @classmethod + def generateEC(cls, curve = rpki.POW.EC_P256_CURVE): + return cls(POW = rpki.POW.AsymmetricParams.generateEC(curve = curve)) + +class RSA(PrivateKey): + """ + Class to hold an RSA key pair. + """ + + @classmethod + def generate(cls, keylength = 2048, quiet = False): + """ + Generate a new keypair. 
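For example:

    keypair = rpki.x509.RSA.generate()    # 2048-bit RSA by default
    ski     = keypair.get_SKI()           # subject key identifier
    pubkey  = keypair.get_public()        # corresponding PublicKey object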
+ """ + if not quiet: + rpki.log.debug("Generating new %d-bit RSA key" % keylength) + if generate_insecure_debug_only_rsa_key is not None: + return cls(POW = generate_insecure_debug_only_rsa_key()) + else: + return cls(POW = rpki.POW.Asymmetric.generateRSA(keylength)) + +class ECDSA(PrivateKey): + """ + Class to hold an ECDSA key pair. + """ + + @classmethod + def generate(cls, params = None, quiet = False): + """ + Generate a new keypair. + """ + + if params is None: + if not quiet: + rpki.log.debug("Generating new ECDSA key parameters") + params = KeyParams.generateEC() + + assert isinstance(params, KeyParams) + + if not quiet: + rpki.log.debug("Generating new ECDSA key") + + return cls(POW = rpki.POW.Asymmetric.generateFromParams(params.get_POW())) + +class CMS_object(DER_object): + """ + Abstract class to hold a CMS object. + """ + + econtent_oid = rpki.oids.id_data + POW_class = rpki.POW.CMS + + ## @var dump_on_verify_failure + # Set this to True to get dumpasn1 dumps of ASN.1 on CMS verify failures. + + dump_on_verify_failure = True + + ## @var debug_cms_certs + # Set this to True to log a lot of chatter about CMS certificates. + + debug_cms_certs = False + + ## @var dump_using_dumpasn1 + # Set this to use external dumpasn1 program, which is prettier and + # more informative than OpenSSL's CMS text dump, but which won't + # work if the dumpasn1 program isn't installed. + + dump_using_dumpasn1 = False + + ## @var require_crls + # Set this to False to make CMS CRLs optional in the cases where we + # would otherwise require them. Some day this option should go away + # and CRLs should be uncondtionally mandatory in such cases. + + require_crls = False + + ## @var allow_extra_certs + # Set this to True to allow CMS messages to contain CA certificates. + + allow_extra_certs = False + + ## @var allow_extra_crls + # Set this to True to allow CMS messages to contain multiple CRLs. + + allow_extra_crls = False + + ## @var print_on_der_error + # Set this to True to log alleged DER when we have trouble parsing + # it, in case it's really a Perl backtrace or something. + + print_on_der_error = True + + def get_DER(self): + """ + Get the DER value of this CMS_object. + """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWrite() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this CMS_object. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = self.POW_class.derRead(self.get_DER()) + return self.POW + + def get_signingTime(self): + """ + Extract signingTime from CMS signed attributes. + """ + return self.get_POW().signingTime() + + def verify(self, ta): + """ + Verify CMS wrapper and store inner content. 
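Rough calling sketch; cms_obj stands for an instance of a concrete subclass, the certificate names are placeholders, and ta may be a single X509 or a sequence (possibly containing None), per X509.normalize_chain():

    content = cms_obj.verify(ta = (bpki_ta, bpki_ca_cert, bpki_ee_cert))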
+ """ + + try: + cms = self.get_POW() + except (rpki.async.ExitNow, SystemExit): + raise + except Exception: + if self.print_on_der_error: + rpki.log.debug("Problem parsing DER CMS message, might not really be DER: %r" % + self.get_DER()) + raise rpki.exceptions.UnparsableCMSDER + + if cms.eContentType() != self.econtent_oid: + raise rpki.exceptions.WrongEContentType("Got CMS eContentType %s, expected %s" % ( + cms.eContentType(), self.econtent_oid)) + + certs = [X509(POW = x) for x in cms.certs()] + crls = [CRL(POW = c) for c in cms.crls()] + + if self.debug_cms_certs: + for x in certs: + rpki.log.debug("Received CMS cert issuer %s subject %s SKI %s" % ( + x.getIssuer(), x.getSubject(), x.hSKI())) + for c in crls: + rpki.log.debug("Received CMS CRL issuer %r" % (c.getIssuer(),)) + + store = rpki.POW.X509Store() + + now = rpki.sundial.now() + + trusted_ee = None + + for x in X509.normalize_chain(ta): + if self.debug_cms_certs: + rpki.log.debug("CMS trusted cert issuer %s subject %s SKI %s" % ( + x.getIssuer(), x.getSubject(), x.hSKI())) + if x.getNotAfter() < now: + raise rpki.exceptions.TrustedCMSCertHasExpired("Trusted CMS certificate has expired", + "%s (%s)" % (x.getSubject(), x.hSKI())) + if not x.is_CA(): + if trusted_ee is None: + trusted_ee = x + else: + raise rpki.exceptions.MultipleCMSEECert("Multiple CMS EE certificates", *("%s (%s)" % ( + x.getSubject(), x.hSKI()) for x in ta if not x.is_CA())) + store.addTrust(x.get_POW()) + + if trusted_ee: + if self.debug_cms_certs: + rpki.log.debug("Trusted CMS EE cert issuer %s subject %s SKI %s" % ( + trusted_ee.getIssuer(), trusted_ee.getSubject(), trusted_ee.hSKI())) + if len(certs) > 1 or (len(certs) == 1 and + (certs[0].getSubject() != trusted_ee.getSubject() or + certs[0].getPublicKey() != trusted_ee.getPublicKey())): + raise rpki.exceptions.UnexpectedCMSCerts("Unexpected CMS certificates", *("%s (%s)" % ( + x.getSubject(), x.hSKI()) for x in certs)) + if crls: + raise rpki.exceptions.UnexpectedCMSCRLs("Unexpected CRLs", *("%s (%s)" % ( + c.getIssuer(), c.hAKI()) for c in crls)) + + else: + untrusted_ee = [x for x in certs if not x.is_CA()] + if len(untrusted_ee) < 1: + raise rpki.exceptions.MissingCMSEEcert + if len(untrusted_ee) > 1 or (not self.allow_extra_certs and len(certs) > len(untrusted_ee)): + raise rpki.exceptions.UnexpectedCMSCerts("Unexpected CMS certificates", *("%s (%s)" % ( + x.getSubject(), x.hSKI()) for x in certs)) + if len(crls) < 1: + if self.require_crls: + raise rpki.exceptions.MissingCMSCRL + else: + rpki.log.warn("MISSING CMS CRL! 
Ignoring per self.require_crls setting") + if len(crls) > 1 and not self.allow_extra_crls: + raise rpki.exceptions.UnexpectedCMSCRLs("Unexpected CRLs", *("%s (%s)" % ( + c.getIssuer(), c.hAKI()) for c in crls)) + + for x in certs: + if x.getNotAfter() < now: + raise rpki.exceptions.CMSCertHasExpired("CMS certificate has expired", "%s (%s)" % ( + x.getSubject(), x.hSKI())) + + for c in crls: + if c.getNextUpdate() < now: + rpki.log.warn("Stale BPKI CMS CRL (%s %s %s)" % (c.getNextUpdate(), c.getIssuer(), c.hAKI())) + + try: + content = cms.verify(store) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception: + if self.dump_on_verify_failure: + if self.dump_using_dumpasn1: + dbg = self.dumpasn1() + else: + dbg = cms.pprint() + rpki.log.warn("CMS verification failed, dumping ASN.1 (%d octets):" % len(self.get_DER())) + for line in dbg.splitlines(): + rpki.log.warn(line) + raise rpki.exceptions.CMSVerificationFailed("CMS verification failed") + + return content + + def extract(self): + """ + Extract and store inner content from CMS wrapper without verifying + the CMS. + + DANGER WILL ROBINSON!!! + + Do not use this method on unvalidated data. Use the verify() + method instead. + + If you don't understand this warning, don't use this method. + """ + + try: + cms = self.get_POW() + except (rpki.async.ExitNow, SystemExit): + raise + except Exception: + raise rpki.exceptions.UnparsableCMSDER + + if cms.eContentType() != self.econtent_oid: + raise rpki.exceptions.WrongEContentType("Got CMS eContentType %s, expected %s" % ( + cms.eContentType(), self.econtent_oid)) + + return cms.verify(rpki.POW.X509Store(), None, + (rpki.POW.CMS_NOCRL | rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | + rpki.POW.CMS_NO_ATTR_VERIFY | rpki.POW.CMS_NO_CONTENT_VERIFY)) + + + def sign(self, keypair, certs, crls = None, no_certs = False): + """ + Sign and wrap inner content. + """ + + rpki.log.trace() + + if isinstance(certs, X509): + cert = certs + certs = () + else: + cert = certs[0] + certs = certs[1:] + + if crls is None: + crls = () + elif isinstance(crls, CRL): + crls = (crls,) + + if self.debug_cms_certs: + rpki.log.debug("Signing with cert issuer %s subject %s SKI %s" % ( + cert.getIssuer(), cert.getSubject(), cert.hSKI())) + for i, c in enumerate(certs): + rpki.log.debug("Additional cert %d issuer %s subject %s SKI %s" % ( + i, c.getIssuer(), c.getSubject(), c.hSKI())) + + self._sign(cert.get_POW(), + keypair.get_POW(), + [x.get_POW() for x in certs], + [c.get_POW() for c in crls], + rpki.POW.CMS_NOCERTS if no_certs else 0) + + @property + def creation_timestamp(self): + """ + Time at which this object was created. + """ + return self.get_signingTime() + + +class Wrapped_CMS_object(CMS_object): + """ + Abstract class to hold CMS objects wrapping non-DER content (eg, XML + or VCard). + + CMS-wrapped objects are a little different from the other DER_object + types because the signed object is CMS wrapping some other kind of + inner content. A Wrapped_CMS_object is the outer CMS wrapped object + so that the usual DER and PEM operations do the obvious things, and + the inner content is handle via separate methods. + """ + + other_clear = ("content",) + + def get_content(self): + """ + Get the inner content of this Wrapped_CMS_object. + """ + if self.content is None: + raise rpki.exceptions.CMSContentNotSet("Inner content of CMS object %r is not set" % self) + return self.content + + def set_content(self, content): + """ + Set the (inner) content of this Wrapped_CMS_object, clearing the wrapper. 
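Rough round-trip sketch for a concrete subclass (some_wrapped_cms, bpki_key, and bpki_ee_cert are placeholders):

    obj = some_wrapped_cms()
    obj.set_content(inner)              # whatever form the subclass's encode() expects
    obj.sign(bpki_key, bpki_ee_cert)    # build and sign the CMS wrapper
    der = obj.get_DER()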
+ """ + self.clear() + self.content = content + + def verify(self, ta): + """ + Verify CMS wrapper and store inner content. + """ + + self.decode(CMS_object.verify(self, ta)) + return self.get_content() + + def extract(self): + """ + Extract and store inner content from CMS wrapper without verifying + the CMS. + + DANGER WILL ROBINSON!!! + + Do not use this method on unvalidated data. Use the verify() + method instead. + + If you don't understand this warning, don't use this method. + """ + + self.decode(CMS_object.extract(self)) + return self.get_content() + + def extract_if_needed(self): + """ + Extract inner content if needed. See caveats for .extract(), do + not use unless you really know what you are doing. + """ + + if self.content is None: + self.extract() + + def _sign(self, cert, keypair, certs, crls, flags): + """ + Internal method to call POW to do CMS signature. This is split + out from the .sign() API method to handle differences in how + different CMS-based POW classes handle the inner content. + """ + + cms = self.POW_class() + cms.sign(cert, keypair, self.encode(), certs, crls, self.econtent_oid, flags) + self.POW = cms + + +class DER_CMS_object(CMS_object): + """ + Abstract class for CMS-based objects with DER-encoded content + handled by C-level subclasses of rpki.POW.CMS. + """ + + def _sign(self, cert, keypair, certs, crls, flags): + self.get_POW().sign(cert, keypair, certs, crls, self.econtent_oid, flags) + + + def extract_if_needed(self): + """ + Extract inner content if needed. See caveats for .extract(), do + not use unless you really know what you are doing. + """ + + try: + self.get_POW().getVersion() + except rpki.POW.NotVerifiedError: + self.extract() + + +class SignedManifest(DER_CMS_object): + """ + Class to hold a signed manifest. + """ + + econtent_oid = rpki.oids.id_ct_rpkiManifest + POW_class = rpki.POW.Manifest + + def getThisUpdate(self): + """ + Get thisUpdate value from this manifest. + """ + return self.get_POW().getThisUpdate() + + def getNextUpdate(self): + """ + Get nextUpdate value from this manifest. + """ + return self.get_POW().getNextUpdate() + + @classmethod + def build(cls, serial, thisUpdate, nextUpdate, names_and_objs, keypair, certs, version = 0): + """ + Build a signed manifest. + """ + + filelist = [] + for name, obj in names_and_objs: + d = rpki.POW.Digest(rpki.POW.SHA256_DIGEST) + d.update(obj.get_DER()) + filelist.append((name.rpartition("/")[2], d.digest())) + filelist.sort(key = lambda x: x[0]) + + obj = cls.POW_class() + obj.setVersion(version) + obj.setManifestNumber(serial) + obj.setThisUpdate(thisUpdate) + obj.setNextUpdate(nextUpdate) + obj.setAlgorithm(rpki.oids.id_sha256) + obj.addFiles(filelist) + + self = cls(POW = obj) + self.sign(keypair, certs) + return self + +class ROA(DER_CMS_object): + """ + Class to hold a signed ROA. + """ + + econtent_oid = rpki.oids.id_ct_routeOriginAttestation + POW_class = rpki.POW.ROA + + @classmethod + def build(cls, asn, ipv4, ipv6, keypair, certs, version = 0): + """ + Build a ROA. + """ + ipv4 = ipv4.to_POW_roa_tuple() if ipv4 else None + ipv6 = ipv6.to_POW_roa_tuple() if ipv6 else None + obj = cls.POW_class() + obj.setVersion(version) + obj.setASID(asn) + obj.setPrefixes(ipv4 = ipv4, ipv6 = ipv6) + self = cls(POW = obj) + self.sign(keypair, certs) + return self + + def tracking_data(self, uri): + """ + Return a string containing data we want to log when tracking how + objects move through the RPKI system. 
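For reference, a rough sketch of building the kind of ROA this method summarizes; ee_keypair, ee_cert, and v4_prefixes are placeholders, and the prefix arguments are expected to provide to_POW_roa_tuple(), as build() shows:

    roa = rpki.x509.ROA.build(
        asn     = 64496,
        ipv4    = v4_prefixes,
        ipv6    = None,
        keypair = ee_keypair,
        certs   = ee_cert)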
+ """ + msg = DER_CMS_object.tracking_data(self, uri) + try: + self.extract_if_needed() + asn = self.get_POW().getASID() + text = [] + for prefixes in self.get_POW().getPrefixes(): + if prefixes is not None: + for prefix, prefixlen, maxprefixlen in prefixes: + if maxprefixlen is None or prefixlen == maxprefixlen: + text.append("%s/%s" % (prefix, prefixlen)) + else: + text.append("%s/%s-%s" % (prefix, prefixlen, maxprefixlen)) + text.sort() + msg = "%s %s %s" % (msg, asn, ",".join(text)) + except: # pylint: disable=W0702 + pass + return msg + +class DeadDrop(object): + """ + Dead-drop utility for storing copies of CMS messages for debugging or + audit. At the moment this uses Maildir mailbox format, as it has + approximately the right properties and a number of useful tools for + manipulating it already exist. + """ + + def __init__(self, name): + self.name = name + self.pid = os.getpid() + self.maildir = mailbox.Maildir(name, factory = None, create = True) + self.warned = False + + def dump(self, obj): + try: + now = time.time() + msg = email.mime.application.MIMEApplication(obj.get_DER(), "x-rpki") + msg["Date"] = email.utils.formatdate(now) + msg["Subject"] = "Process %s dump of %r" % (self.pid, obj) + msg["Message-ID"] = email.utils.make_msgid() + msg["X-RPKI-PID"] = str(self.pid) + msg["X-RPKI-Object"] = repr(obj) + msg["X-RPKI-Timestamp"] = "%f" % now + self.maildir.add(msg) + self.warned = False + except Exception, e: + if not self.warned: + rpki.log.warn("Could not write to mailbox %s: %s" % (self.name, e)) + self.warned = True + +class XML_CMS_object(Wrapped_CMS_object): + """ + Class to hold CMS-wrapped XML protocol data. + """ + + econtent_oid = rpki.oids.id_ct_xml + + ## @var dump_outbound_cms + # If set, we write all outbound XML-CMS PDUs to disk, for debugging. + # If set, value should be a DeadDrop object. + + dump_outbound_cms = None + + ## @var dump_inbound_cms + # If set, we write all inbound XML-CMS PDUs to disk, for debugging. + # If set, value should be a DeadDrop object. + + dump_inbound_cms = None + + ## @var check_inbound_schema + # If set, perform RelaxNG schema check on inbound messages. + + check_inbound_schema = True + + ## @var check_outbound_schema + # If set, perform RelaxNG schema check on outbound messages. + + check_outbound_schema = False + + def encode(self): + """ + Encode inner content for signing. + """ + return lxml.etree.tostring(self.get_content(), + pretty_print = True, + encoding = self.encoding, + xml_declaration = True) + + def decode(self, xml): + """ + Decode XML and set inner content. + """ + self.content = lxml.etree.fromstring(xml) + + def pretty_print_content(self): + """ + Pretty print XML content of this message. + """ + return lxml.etree.tostring(self.get_content(), + pretty_print = True, + encoding = self.encoding, + xml_declaration = True) + + def schema_check(self): + """ + Handle XML RelaxNG schema check. + """ + try: + self.schema.assertValid(self.get_content()) + except lxml.etree.DocumentInvalid: + rpki.log.error("PDU failed schema check") + for line in self.pretty_print_content().splitlines(): + rpki.log.warn(line) + raise + + def dump_to_disk(self, prefix): + """ + Write DER of current message to disk, for debugging. + """ + f = open(prefix + rpki.sundial.now().isoformat() + "Z.cms", "wb") + f.write(self.get_DER()) + f.close() + + def wrap(self, msg, keypair, certs, crls = None): + """ + Wrap an XML PDU in CMS and return its DER encoding. 
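Rough sketch using the SignedReferral subclass defined below; the key and certificate names are placeholders, and msg_elt is an lxml element (SignedReferral sets saxify to None, so wrap() stores the element directly):

    referral = SignedReferral()
    der = referral.wrap(msg_elt, bpki_key, bpki_ee_cert)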
+ """ + rpki.log.trace() + if self.saxify is None: + self.set_content(msg) + else: + self.set_content(msg.toXML()) + if self.check_outbound_schema: + self.schema_check() + self.sign(keypair, certs, crls) + if self.dump_outbound_cms: + self.dump_outbound_cms.dump(self) + return self.get_DER() + + def unwrap(self, ta): + """ + Unwrap a CMS-wrapped XML PDU and return Python objects. + """ + if self.dump_inbound_cms: + self.dump_inbound_cms.dump(self) + self.verify(ta) + if self.check_inbound_schema: + self.schema_check() + if self.saxify is None: + return self.get_content() + else: + return self.saxify(self.get_content()) # pylint: disable=E1102 + + def check_replay(self, timestamp, *context): + """ + Check CMS signing-time in this object against a recorded + timestamp. Raises an exception if the recorded timestamp is more + recent, otherwise returns the new timestamp. + """ + new_timestamp = self.get_signingTime() + if timestamp is not None and timestamp > new_timestamp: + if context: + context = " (" + " ".join(context) + ")" + raise rpki.exceptions.CMSReplay( + "CMS replay: last message %s, this message %s%s" % ( + timestamp, new_timestamp, context)) + return new_timestamp + + def check_replay_sql(self, obj, *context): + """ + Like .check_replay() but gets recorded timestamp from + "last_cms_timestamp" field of an SQL object and stores the new + timestamp back in that same field. + """ + obj.last_cms_timestamp = self.check_replay(obj.last_cms_timestamp, *context) + obj.sql_mark_dirty() + + ## @var saxify + # SAX handler hook. Subclasses can set this to a SAX handler, in + # which case .unwrap() will call it and return the result. + # Otherwise, .unwrap() just returns a verified element tree. + + saxify = None + +class SignedReferral(XML_CMS_object): + encoding = "us-ascii" + schema = rpki.relaxng.myrpki + saxify = None + +class Ghostbuster(Wrapped_CMS_object): + """ + Class to hold Ghostbusters record (CMS-wrapped VCard). This is + quite minimal because we treat the VCard as an opaque byte string + managed by the back-end. + """ + + econtent_oid = rpki.oids.id_ct_rpkiGhostbusters + + def encode(self): + """ + Encode inner content for signing. At the moment we're treating + the VCard as an opaque byte string, so no encoding needed here. + """ + return self.get_content() + + def decode(self, vcard): + """ + Decode XML and set inner content. At the moment we're treating + the VCard as an opaque byte string, so no encoding needed here. + """ + self.content = vcard + + @classmethod + def build(cls, vcard, keypair, certs): + """ + Build a Ghostbuster record. + """ + self = cls() + self.set_content(vcard) + self.sign(keypair, certs) + return self + + +class CRL(DER_object): + """ + Class to hold a Certificate Revocation List. + """ + + POW_class = rpki.POW.CRL + + def get_DER(self): + """ + Get the DER value of this CRL. + """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWrite() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this CRL. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = rpki.POW.CRL.derRead(self.get_DER()) + return self.POW + + def getThisUpdate(self): + """ + Get thisUpdate value from this CRL. + """ + return self.get_POW().getThisUpdate() + + def getNextUpdate(self): + """ + Get nextUpdate value from this CRL. 
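Rough sketch of the generate() classmethod defined below; the names are placeholders and revokedCertificates is whatever sequence rpki.POW.CRL.addRevocations() accepts:

    crl = rpki.x509.CRL.generate(
        keypair             = ca_keypair,
        issuer              = ca_cert,        # rpki.x509.X509 of the issuing CA
        serial              = crl_number,
        thisUpdate          = rpki.sundial.now(),
        nextUpdate          = rpki.sundial.now() + rpki.sundial.timedelta(hours = 6),
        revokedCertificates = [])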
+ """ + return self.get_POW().getNextUpdate() + + def getIssuer(self): + """ + Get issuer value of this CRL. + """ + return X501DN.from_POW(self.get_POW().getIssuer()) + + def getCRLNumber(self): + """ + Get CRL Number value for this CRL. + """ + return self.get_POW().getCRLNumber() + + @classmethod + def generate(cls, keypair, issuer, serial, thisUpdate, nextUpdate, revokedCertificates, version = 1): + """ + Generate a new CRL. + """ + crl = rpki.POW.CRL() + crl.setVersion(version) + crl.setIssuer(issuer.getSubject().get_POW()) + crl.setThisUpdate(thisUpdate) + crl.setNextUpdate(nextUpdate) + crl.setAKI(issuer.get_SKI()) + crl.setCRLNumber(serial) + crl.addRevocations(revokedCertificates) + crl.sign(keypair.get_POW()) + return cls(POW = crl) + + @property + def creation_timestamp(self): + """ + Time at which this object was created. + """ + return self.getThisUpdate() + +## @var uri_dispatch_map +# Map of known URI filename extensions and corresponding classes. + +uri_dispatch_map = { + ".cer" : X509, + ".crl" : CRL, + ".gbr" : Ghostbuster, + ".mft" : SignedManifest, + ".mnf" : SignedManifest, + ".roa" : ROA, + } + +def uri_dispatch(uri): + """ + Return the Python class object corresponding to a given URI. + """ + return uri_dispatch_map[os.path.splitext(uri)[1]] diff --git a/rpki/xml_utils.py b/rpki/xml_utils.py new file mode 100644 index 00000000..f254fd11 --- /dev/null +++ b/rpki/xml_utils.py @@ -0,0 +1,494 @@ +# $Id$ +# +# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +XML utilities. +""" + +import xml.sax +import lxml.sax +import lxml.etree +import rpki.exceptions + +class sax_handler(xml.sax.handler.ContentHandler): + """ + SAX handler for RPKI protocols. + + This class provides some basic amenities for parsing protocol XML of + the kind we use in the RPKI protocols, including whacking all the + protocol element text into US-ASCII, simplifying accumulation of + text fields, and hiding some of the fun relating to XML namespaces. 
+ + General assumption: by the time this parsing code gets invoked, the + XML has already passed RelaxNG validation, so we only have to check + for errors that the schema can't catch, and we don't have to play as + many XML namespace games. + """ + + def __init__(self): + """ + Initialize SAX handler. + """ + xml.sax.handler.ContentHandler.__init__(self) + self.text = "" + self.stack = [] + + def startElementNS(self, name, qname, attrs): + """ + Redirect startElementNS() events to startElement(). + """ + return self.startElement(name[1], attrs) + + def endElementNS(self, name, qname): + """ + Redirect endElementNS() events to endElement(). + """ + return self.endElement(name[1]) + + def characters(self, content): + """ + Accumulate a chuck of element content (text). + """ + self.text += content + + def startElement(self, name, attrs): + """ + Handle startElement() events. + + We maintain a stack of nested elements under construction so that + we can feed events directly to the current element rather than + having to pass them through all the nesting elements. + + If the stack is empty, this event is for the outermost element, so + we call a virtual method to create the corresponding object and + that's the object we'll be returning as our final result. + """ + + a = dict() + for k, v in attrs.items(): + if isinstance(k, tuple): + if k == ("http://www.w3.org/XML/1998/namespace", "lang"): + k = "xml:lang" + else: + assert k[0] is None + k = k[1] + a[k.encode("ascii")] = v.encode("ascii") + if len(self.stack) == 0: + assert not hasattr(self, "result") + self.result = self.create_top_level(name, a) + self.stack.append(self.result) + self.stack[-1].startElement(self.stack, name, a) + + def endElement(self, name): + """ + Handle endElement() events. Mostly this means handling any + accumulated element text. + """ + text = self.text.encode("ascii").strip() + self.text = "" + self.stack[-1].endElement(self.stack, name, text) + + @classmethod + def saxify(cls, elt): + """ + Create a one-off SAX parser, parse an ETree, return the result. + """ + self = cls() + lxml.sax.saxify(elt, self) + return self.result + + def create_top_level(self, name, attrs): + """ + Handle top-level PDU for this protocol. + """ + assert name == self.name and attrs["version"] == self.version + return self.pdu() + +class base_elt(object): + """ + Virtual base class for XML message elements. The left-right and + publication protocols use this. At least for now, the up-down + protocol does not, due to different design assumptions. + """ + + ## @var attributes + # XML attributes for this element. + attributes = () + + ## @var elements + # XML elements contained by this element. + elements = () + + ## @var booleans + # Boolean attributes (value "yes" or "no") for this element. + booleans = () + + def startElement(self, stack, name, attrs): + """ + Default startElement() handler: just process attributes. + """ + if name not in self.elements: + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + self.read_attrs(attrs) + + def endElement(self, stack, name, text): + """ + Default endElement() handler: just pop the stack. + """ + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + stack.pop() + + def toXML(self): + """ + Default toXML() element generator. + """ + return self.make_elt() + + def read_attrs(self, attrs): + """ + Template-driven attribute reader. 
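A rough sketch of the sort of subclass this machinery expects; the element and attribute names are placeholders:

    class widget_elt(base_elt):
      element_name = "widget"
      attributes   = ("action", "tag", "widget_handle")
      booleans     = ("clear_replay_protection",)

Numeric-looking attribute values are converted to long, except for attributes whose names end in "_handle", which are left as strings.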
+ """ + for key in self.attributes: + val = attrs.get(key, None) + if isinstance(val, str) and val.isdigit() and not key.endswith("_handle"): + val = long(val) + setattr(self, key, val) + for key in self.booleans: + setattr(self, key, attrs.get(key, False)) + + def make_elt(self): + """ + XML element constructor. + """ + elt = lxml.etree.Element("{%s}%s" % (self.xmlns, self.element_name), nsmap = self.nsmap) + for key in self.attributes: + val = getattr(self, key, None) + if val is not None: + elt.set(key, str(val)) + for key in self.booleans: + if getattr(self, key, False): + elt.set(key, "yes") + return elt + + def make_b64elt(self, elt, name, value): + """ + Constructor for Base64-encoded subelement. + """ + if value is not None and not value.empty(): + lxml.etree.SubElement(elt, "{%s}%s" % (self.xmlns, name), nsmap = self.nsmap).text = value.get_Base64() + + def __str__(self): + """ + Convert a base_elt object to string format. + """ + return lxml.etree.tostring(self.toXML(), pretty_print = True, encoding = "us-ascii") + + @classmethod + def make_pdu(cls, **kargs): + """ + Generic PDU constructor. + """ + self = cls() + for k, v in kargs.items(): + if isinstance(v, bool): + v = 1 if v else 0 + setattr(self, k, v) + return self + +class text_elt(base_elt): + """ + Virtual base class for XML message elements that contain text. + """ + + ## @var text_attribute + # Name of the class attribute that holds the text value. + text_attribute = None + + def endElement(self, stack, name, text): + """ + Extract text from parsed XML. + """ + base_elt.endElement(self, stack, name, text) + setattr(self, self.text_attribute, text) + + def toXML(self): + """ + Insert text into generated XML. + """ + elt = self.make_elt() + elt.text = getattr(self, self.text_attribute) or None + return elt + +class data_elt(base_elt): + """ + Virtual base class for PDUs that map to SQL objects. These objects + all implement the create/set/get/list/destroy action attribute. + """ + + def endElement(self, stack, name, text): + """ + Default endElement handler for SQL-based objects. This assumes + that sub-elements are Base64-encoded using the sql_template + mechanism. + """ + if name in self.elements: + elt_type = self.sql_template.map.get(name) + assert elt_type is not None, "Couldn't find element type for %s, stack %s" % (name, stack) + setattr(self, name, elt_type(Base64 = text)) + else: + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + stack.pop() + + def toXML(self): + """ + Default element generator for SQL-based objects. This assumes + that sub-elements are Base64-encoded DER objects. + """ + elt = self.make_elt() + for i in self.elements: + self.make_b64elt(elt, i, getattr(self, i, None)) + return elt + + def make_reply(self, r_pdu = None): + """ + Construct a reply PDU. + """ + if r_pdu is None: + r_pdu = self.__class__() + self.make_reply_clone_hook(r_pdu) + handle_name = self.element_name + "_handle" + setattr(r_pdu, handle_name, getattr(self, handle_name, None)) + else: + self.make_reply_clone_hook(r_pdu) + for b in r_pdu.booleans: + setattr(r_pdu, b, False) + r_pdu.action = self.action + r_pdu.tag = self.tag + return r_pdu + + def make_reply_clone_hook(self, r_pdu): + """ + Overridable hook. + """ + pass + + def serve_fetch_one(self): + """ + Find the object on which a get, set, or destroy method should + operate. 
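Note that this relies on serve_fetch_one_maybe(), which is not defined in this module and is expected to come from the SQL-backed subclass, returning either the matching object or None. Rough sketch:

    obj = q_pdu.serve_fetch_one()    # raises rpki.exceptions.NotFound if nothing matches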
+ """ + r = self.serve_fetch_one_maybe() + if r is None: + raise rpki.exceptions.NotFound + return r + + def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Overridable hook. + """ + cb() + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Overridable hook. + """ + cb() + + def serve_create(self, r_msg, cb, eb): + """ + Handle a create action. + """ + + r_pdu = self.make_reply() + + def one(): + self.sql_store() + setattr(r_pdu, self.sql_template.index, getattr(self, self.sql_template.index)) + self.serve_post_save_hook(self, r_pdu, two, eb) + + def two(): + r_msg.append(r_pdu) + cb() + + oops = self.serve_fetch_one_maybe() + if oops is not None: + raise rpki.exceptions.DuplicateObject, "Object already exists: %r[%r] %r[%r]" % (self, getattr(self, self.element_name + "_handle"), + oops, getattr(oops, oops.element_name + "_handle")) + + self.serve_pre_save_hook(self, r_pdu, one, eb) + + def serve_set(self, r_msg, cb, eb): + """ + Handle a set action. + """ + + db_pdu = self.serve_fetch_one() + r_pdu = self.make_reply() + for a in db_pdu.sql_template.columns[1:]: + v = getattr(self, a, None) + if v is not None: + setattr(db_pdu, a, v) + db_pdu.sql_mark_dirty() + + def one(): + db_pdu.sql_store() + db_pdu.serve_post_save_hook(self, r_pdu, two, eb) + + def two(): + r_msg.append(r_pdu) + cb() + + db_pdu.serve_pre_save_hook(self, r_pdu, one, eb) + + def serve_get(self, r_msg, cb, eb): + """ + Handle a get action. + """ + r_pdu = self.serve_fetch_one() + self.make_reply(r_pdu) + r_msg.append(r_pdu) + cb() + + def serve_list(self, r_msg, cb, eb): + """ + Handle a list action for non-self objects. + """ + for r_pdu in self.serve_fetch_all(): + self.make_reply(r_pdu) + r_msg.append(r_pdu) + cb() + + def serve_destroy_hook(self, cb, eb): + """ + Overridable hook. + """ + cb() + + def serve_destroy(self, r_msg, cb, eb): + """ + Handle a destroy action. + """ + def done(): + db_pdu.sql_delete() + r_msg.append(self.make_reply()) + cb() + db_pdu = self.serve_fetch_one() + db_pdu.serve_destroy_hook(done, eb) + + def serve_dispatch(self, r_msg, cb, eb): + """ + Action dispatch handler. + """ + dispatch = { "create" : self.serve_create, + "set" : self.serve_set, + "get" : self.serve_get, + "list" : self.serve_list, + "destroy" : self.serve_destroy } + if self.action not in dispatch: + raise rpki.exceptions.BadQuery, "Unexpected query: action %s" % self.action + dispatch[self.action](r_msg, cb, eb) + + def unimplemented_control(self, *controls): + """ + Uniform handling for unimplemented control operations. + """ + unimplemented = [x for x in controls if getattr(self, x, False)] + if unimplemented: + raise rpki.exceptions.NotImplementedYet, "Unimplemented control %s" % ", ".join(unimplemented) + +class msg(list): + """ + Generic top-level PDU. + """ + + def startElement(self, stack, name, attrs): + """ + Handle top-level PDU. + """ + if name == "msg": + assert self.version == int(attrs["version"]) + self.type = attrs["type"] + else: + elt = self.pdus[name]() + self.append(elt) + stack.append(elt) + elt.startElement(stack, name, attrs) + + def endElement(self, stack, name, text): + """ + Handle top-level PDU. + """ + assert name == "msg", "Unexpected name %s, stack %s" % (name, stack) + assert len(stack) == 1 + stack.pop() + + def __str__(self): + """ + Convert msg object to string. + """ + return lxml.etree.tostring(self.toXML(), pretty_print = True, encoding = "us-ascii") + + def toXML(self): + """ + Generate top-level PDU. 
+ """ + elt = lxml.etree.Element("{%s}msg" % (self.xmlns), nsmap = self.nsmap, version = str(self.version), type = self.type) + elt.extend([i.toXML() for i in self]) + return elt + + @classmethod + def query(cls, *args): + """ + Create a query PDU. + """ + self = cls(args) + self.type = "query" + return self + + @classmethod + def reply(cls, *args): + """ + Create a reply PDU. + """ + self = cls(args) + self.type = "reply" + return self + + def is_query(self): + """ + Is this msg a query? + """ + return self.type == "query" + + def is_reply(self): + """ + Is this msg a reply? + """ + return self.type == "reply"
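As a rough sketch of how a protocol module might drive this class, with msg_subclass and the PDU variables as placeholders; concrete subclasses are expected to supply version, xmlns, nsmap, and a pdus dispatch table, as the methods above assume:

    q_msg = msg_subclass.query(q_pdu_1, q_pdu_2)
    elt   = q_msg.toXML()            # lxml element, ready for CMS wrapping
    assert q_msg.is_query() and not q_msg.is_reply()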