about summary refs log tree commit diff
path: root/ca/tests/smoketest.py
diff options
context:
space:
mode:
author: Rob Austein <sra@hactrn.net> 2015-10-26 06:29:00 +0000
committer: Rob Austein <sra@hactrn.net> 2015-10-26 06:29:00 +0000
commit: b46deb1417dc3596e9ac9fe2fe8cc0b7f42457e7 (patch)
tree: ca0dc0276d1adc168bc3337ce0564c4ec4957c1b /ca/tests/smoketest.py
parent: 397beaf6d9900dc3b3cb612c89ebf1d57b1d16f6 (diff)
"Any programmer who fails to comply with the standard naming, formatting,
or commenting conventions should be shot. If it so happens that it is inconvenient to shoot him, then he is to be politely requested to recode his program in adherence to the above standard." -- Michael Spier, Digital Equipment Corporation svn path=/branches/tk705/; revision=6152
Diffstat (limited to 'ca/tests/smoketest.py')
-rw-r--r-- ca/tests/smoketest.py | 2196 lines
1 file changed, 1098 insertions, 1098 deletions
diff --git a/ca/tests/smoketest.py b/ca/tests/smoketest.py
index 3960981a..9d82c640 100644
--- a/ca/tests/smoketest.py
+++ b/ca/tests/smoketest.py
@@ -76,14 +76,14 @@ yaml_script = [y for y in yaml.safe_load_all(args.yaml_file)]
# Define port allocator early, so we can use it while reading config
def allocate_port():
- """
- Allocate a TCP port number.
- """
+ """
+ Allocate a TCP port number.
+ """
- global base_port
- p = base_port
- base_port += 1
- return p
+ global base_port
+ p = base_port
+ base_port += 1
+ return p
# Most filenames in the following are relative to the working directory.
@@ -139,14 +139,14 @@ pubd_last_cms_time = None
ecdsa_params = None
class CantRekeyYAMLLeaf(Exception):
- """
- Can't rekey YAML leaf.
- """
+ """
+ Can't rekey YAML leaf.
+ """
class CouldntIssueBSCEECertificate(Exception):
- """
- Couldn't issue BSC EE certificate
- """
+ """
+ Couldn't issue BSC EE certificate
+ """
sql_conversions = MySQLdb.converters.conversions.copy()
sql_conversions.update({
@@ -154,202 +154,202 @@ sql_conversions.update({
MySQLdb.converters.FIELD_TYPE.DATETIME : rpki.sundial.datetime.DateTime_or_None })
def main():
- """
- Main program.
- """
-
- rpki.log.init(smoketest_name, argparse.Namespace(log_level = logging.DEBUG,
- log_handler = lambda: logging.StreamHandler(sys.stdout)))
- logger.info("Starting")
-
- rpki.http.http_client.timeout = rpki.sundial.timedelta(hours = 1)
-
- pubd_process = None
- rootd_process = None
- rsyncd_process = None
-
- rpki_sql = mangle_sql(rpki_sql_file)
- irdb_sql = mangle_sql(irdb_sql_file)
- pubd_sql = mangle_sql(pub_sql_file)
-
- logger.info("Initializing test directory")
-
- # Connect to test directory, creating it if necessary
- try:
- os.chdir(smoketest_dir)
- except OSError:
- os.makedirs(smoketest_dir)
- os.chdir(smoketest_dir)
-
- # Now that we're in the right directory, we can figure out whether
- # we have a private openssl executable to use
- global prog_openssl
- if not os.path.exists(prog_openssl):
- prog_openssl = "openssl"
-
- # Discard everything but keys, which take a while to generate.
- # Apparently os.walk() can't tell the difference between directories
- # and symlinks to directories, so we have to handle both.
- for root, dirs, files in os.walk(".", topdown = False):
- for fn in files:
- if not fn.endswith(".key"):
- os.remove(os.path.join(root, fn))
- for d in dirs:
- try:
- os.rmdir(os.path.join(root, d))
- except OSError, e:
- if e.errno == errno.ENOTDIR:
- os.remove(os.path.join(root, d))
- else:
- raise
-
- logger.info("Reading master YAML configuration")
- y = yaml_script.pop(0)
+ """
+ Main program.
+ """
- logger.info("Constructing internal allocation database")
- db = allocation_db(y)
+ rpki.log.init(smoketest_name, argparse.Namespace(log_level = logging.DEBUG,
+ log_handler = lambda: logging.StreamHandler(sys.stdout)))
+ logger.info("Starting")
- logger.info("Constructing BPKI keys and certs for rootd")
- setup_bpki_cert_chain(rootd_name, ee = ("RPKI",))
+ rpki.http.http_client.timeout = rpki.sundial.timedelta(hours = 1)
- logger.info("Constructing BPKI keys and certs for pubd")
- setup_bpki_cert_chain(pubd_name, ee = ("PUBD", "IRBE"))
+ pubd_process = None
+ rootd_process = None
+ rsyncd_process = None
+ rpki_sql = mangle_sql(rpki_sql_file)
+ irdb_sql = mangle_sql(irdb_sql_file)
+ pubd_sql = mangle_sql(pub_sql_file)
- for a in db:
- a.setup_bpki_certs()
+ logger.info("Initializing test directory")
- setup_publication(pubd_sql, db.root.irdb_db_name)
- setup_rootd(db.root, y.get("rootd", {}), db)
- setup_rsyncd()
- setup_rcynic()
+ # Connect to test directory, creating it if necessary
+ try:
+ os.chdir(smoketest_dir)
+ except OSError:
+ os.makedirs(smoketest_dir)
+ os.chdir(smoketest_dir)
+
+ # Now that we're in the right directory, we can figure out whether
+ # we have a private openssl executable to use
+ global prog_openssl
+ if not os.path.exists(prog_openssl):
+ prog_openssl = "openssl"
+
+ # Discard everything but keys, which take a while to generate.
+ # Apparently os.walk() can't tell the difference between directories
+ # and symlinks to directories, so we have to handle both.
+ for root, dirs, files in os.walk(".", topdown = False):
+ for fn in files:
+ if not fn.endswith(".key"):
+ os.remove(os.path.join(root, fn))
+ for d in dirs:
+ try:
+ os.rmdir(os.path.join(root, d))
+ except OSError, e:
+ if e.errno == errno.ENOTDIR:
+ os.remove(os.path.join(root, d))
+ else:
+ raise
+
+ logger.info("Reading master YAML configuration")
+ y = yaml_script.pop(0)
+
+ logger.info("Constructing internal allocation database")
+ db = allocation_db(y)
+
+ logger.info("Constructing BPKI keys and certs for rootd")
+ setup_bpki_cert_chain(rootd_name, ee = ("RPKI",))
+
+ logger.info("Constructing BPKI keys and certs for pubd")
+ setup_bpki_cert_chain(pubd_name, ee = ("PUBD", "IRBE"))
+
+
+ for a in db:
+ a.setup_bpki_certs()
+
+ setup_publication(pubd_sql, db.root.irdb_db_name)
+ setup_rootd(db.root, y.get("rootd", {}), db)
+ setup_rsyncd()
+ setup_rcynic()
- for a in db.engines:
- a.setup_conf_file()
- a.setup_sql(rpki_sql, irdb_sql)
- a.sync_sql()
+ for a in db.engines:
+ a.setup_conf_file()
+ a.setup_sql(rpki_sql, irdb_sql)
+ a.sync_sql()
- try:
+ try:
- logger.info("Starting rootd")
- rootd_process = subprocess.Popen((prog_python, prog_rootd, "--foreground", "--log-stdout", "--log-level", "debug"),
- env = dict(os.environ, RPKI_CONF = rootd_name + ".conf"))
+ logger.info("Starting rootd")
+ rootd_process = subprocess.Popen((prog_python, prog_rootd, "--foreground", "--log-stdout", "--log-level", "debug"),
+ env = dict(os.environ, RPKI_CONF = rootd_name + ".conf"))
- logger.info("Starting pubd")
- pubd_process = subprocess.Popen((prog_python, prog_pubd, "--foreground", "--log-stdout", "--log-level", "debug") +
- (("-p", pubd_name + ".prof") if args.profile else ()),
- env = dict(os.environ, RPKI_CONF = pubd_name + ".conf"))
+ logger.info("Starting pubd")
+ pubd_process = subprocess.Popen((prog_python, prog_pubd, "--foreground", "--log-stdout", "--log-level", "debug") +
+ (("-p", pubd_name + ".prof") if args.profile else ()),
+ env = dict(os.environ, RPKI_CONF = pubd_name + ".conf"))
- logger.info("Starting rsyncd")
- rsyncd_process = subprocess.Popen((prog_rsyncd, "--daemon", "--no-detach", "--config", rsyncd_name + ".conf"))
+ logger.info("Starting rsyncd")
+ rsyncd_process = subprocess.Popen((prog_rsyncd, "--daemon", "--no-detach", "--config", rsyncd_name + ".conf"))
- # Start rpkid and irdbd instances
- for a in db.engines:
- a.run_daemons()
+ # Start rpkid and irdbd instances
+ for a in db.engines:
+ a.run_daemons()
- # From this point on we'll be running event-driven, so the rest of
- # the code until final exit is all closures.
+ # From this point on we'll be running event-driven, so the rest of
+ # the code until final exit is all closures.
- def start():
- rpki.async.iterator(db.engines, create_rpki_objects, create_pubd_objects)
+ def start():
+ rpki.async.iterator(db.engines, create_rpki_objects, create_pubd_objects)
- def create_rpki_objects(iterator, a):
- a.create_rpki_objects(iterator)
+ def create_rpki_objects(iterator, a):
+ a.create_rpki_objects(iterator)
- def create_pubd_objects():
- call_pubd([rpki.publication_control.client_elt.make_pdu(action = "create",
- client_handle = db.root.client_handle + "-" + rootd_name,
- base_uri = rootd_sia,
- bpki_cert = cross_certify(rootd_name + "-TA", pubd_name + "-TA"))],
- cb = lambda ignored: yaml_loop())
+ def create_pubd_objects():
+ call_pubd([rpki.publication_control.client_elt.make_pdu(action = "create",
+ client_handle = db.root.client_handle + "-" + rootd_name,
+ base_uri = rootd_sia,
+ bpki_cert = cross_certify(rootd_name + "-TA", pubd_name + "-TA"))],
+ cb = lambda ignored: yaml_loop())
- def yaml_loop():
+ def yaml_loop():
- # This is probably where we should be updating expired BPKI
- # objects, particular CRLs
+ # This is probably where we should be updating expired BPKI
+ # objects, particular CRLs
- logger.info("Running cron for all RPKI engines")
- rpki.async.iterator(db.engines, run_cron, run_yaml)
+ logger.info("Running cron for all RPKI engines")
+ rpki.async.iterator(db.engines, run_cron, run_yaml)
- def run_cron(iterator, a):
- a.run_cron(iterator)
+ def run_cron(iterator, a):
+ a.run_cron(iterator)
- def run_yaml():
+ def run_yaml():
- # Run rcynic to check results
- run_rcynic()
+ # Run rcynic to check results
+ run_rcynic()
- # Apply next delta if we have one; otherwise, we're done.
- if yaml_script:
- logger.info("Applying deltas")
- db.apply_delta(yaml_script.pop(0), apply_delta_done)
- else:
- logger.info("No more deltas to apply, done")
- rpki.async.exit_event_loop()
+ # Apply next delta if we have one; otherwise, we're done.
+ if yaml_script:
+ logger.info("Applying deltas")
+ db.apply_delta(yaml_script.pop(0), apply_delta_done)
+ else:
+ logger.info("No more deltas to apply, done")
+ rpki.async.exit_event_loop()
- def apply_delta_done():
+ def apply_delta_done():
- # Resync IRDBs
- for a in db.engines:
- a.sync_sql()
+ # Resync IRDBs
+ for a in db.engines:
+ a.sync_sql()
- # Loop until we run out of control YAML
- yaml_loop()
+ # Loop until we run out of control YAML
+ yaml_loop()
- logger.info("Sleeping %d seconds while daemons start up", startup_delay)
- rpki.async.timer(start).set(rpki.sundial.timedelta(seconds = startup_delay))
- rpki.async.event_loop()
+ logger.info("Sleeping %d seconds while daemons start up", startup_delay)
+ rpki.async.timer(start).set(rpki.sundial.timedelta(seconds = startup_delay))
+ rpki.async.event_loop()
- # At this point we have gone into event-driven code.
+ # At this point we have gone into event-driven code.
- logger.info("Event loop exited normally")
+ logger.info("Event loop exited normally")
- except Exception, e:
- logger.exception("Event loop exited with an exception")
+ except Exception, e:
+ logger.exception("Event loop exited with an exception")
- finally:
- logger.info("Cleaning up")
- for a in db.engines:
- a.kill_daemons()
- for proc, name in ((rootd_process, "rootd"),
- (pubd_process, "pubd"),
- (rsyncd_process, "rsyncd")):
- # pylint: disable=E1103
- if proc is not None and proc.poll() is None:
- logger.info("Killing %s, pid %s", name, proc.pid)
- try:
- proc.terminate()
- except OSError:
- pass
- if proc is not None:
- logger.info("Daemon %s, pid %s exited with code %s", name, proc.pid, proc.wait())
+ finally:
+ logger.info("Cleaning up")
+ for a in db.engines:
+ a.kill_daemons()
+ for proc, name in ((rootd_process, "rootd"),
+ (pubd_process, "pubd"),
+ (rsyncd_process, "rsyncd")):
+ # pylint: disable=E1103
+ if proc is not None and proc.poll() is None:
+ logger.info("Killing %s, pid %s", name, proc.pid)
+ try:
+ proc.terminate()
+ except OSError:
+ pass
+ if proc is not None:
+ logger.info("Daemon %s, pid %s exited with code %s", name, proc.pid, proc.wait())
def cmd_sleep(cb, interval):
- """
- Set an alarm, then wait for it to go off.
- """
+ """
+ Set an alarm, then wait for it to go off.
+ """
- howlong = rpki.sundial.timedelta.parse(interval)
- logger.info("Sleeping %r", howlong)
- rpki.async.timer(cb).set(howlong)
+ howlong = rpki.sundial.timedelta.parse(interval)
+ logger.info("Sleeping %r", howlong)
+ rpki.async.timer(cb).set(howlong)
def cmd_shell(cb, *cmd):
- """
- Run a shell command.
- """
+ """
+ Run a shell command.
+ """
- cmd = " ".join(cmd)
- status = subprocess.call(cmd, shell = True)
- logger.info("Shell command returned status %d", status)
- cb()
+ cmd = " ".join(cmd)
+ status = subprocess.call(cmd, shell = True)
+ logger.info("Shell command returned status %d", status)
+ cb()
def cmd_echo(cb, *words):
- """
- Echo some text to the log.
- """
+ """
+ Echo some text to the log.
+ """
- logger.info(" ".join(words))
- cb()
+ logger.info(" ".join(words))
+ cb()
## @var cmds
# Dispatch table for commands embedded in delta sections
@@ -359,1019 +359,1019 @@ cmds = { "sleep" : cmd_sleep,
"echo" : cmd_echo }
class roa_request(object):
- """
- Representation for a roa_request object.
- """
-
- def __init__(self, asn, ipv4, ipv6):
- self.asn = asn
- self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None
- self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None
-
- def __eq__(self, other):
- return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6
-
- def __hash__(self):
- v4 = tuple(self.v4) if self.v4 is not None else None
- v6 = tuple(self.v6) if self.v6 is not None else None
- return self.asn.__hash__() + v4.__hash__() + v6.__hash__()
-
- def __str__(self):
- if self.v4 and self.v6: s = str(self.v4) + "," + str(self.v6)
- elif self.v4: s = str(self.v4)
- else: s = str(self.v6)
- return "%s: %s" % (self.asn, s)
-
- @classmethod
- def parse(cls, yaml):
- return cls(yaml.get("asn"), yaml.get("ipv4"), yaml.get("ipv6"))
-
-class router_cert(object):
- """
- Representation for a router_cert object.
- """
-
- _ecparams = None
- _keypair = None
- _pkcs10 = None
- _gski = None
-
- @classmethod
- def ecparams(cls):
- if cls._ecparams is None:
- cls._ecparams = rpki.x509.KeyParams.generateEC()
- return cls._ecparams
-
- def __init__(self, asn, router_id):
- self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split()))
- self.router_id = router_id
- self.cn = "ROUTER-%08x" % self.asn[0].min
- self.sn = "%08x" % self.router_id
- self.eku = rpki.oids.id_kp_bgpsec_router
-
- @property
- def keypair(self):
- if self._keypair is None:
- self._keypair = rpki.x509.ECDSA.generate(self.ecparams())
- return self._keypair
-
- @property
- def pkcs10(self):
- if self._pkcs10 is None:
- self._pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair)
- return self._pkcs10
-
- @property
- def gski(self):
- if self._gski is None:
- self._gski = self.pkcs10.gSKI()
- return self._gski
-
- def __eq__(self, other):
- return self.asn == other.asn and self.sn == other.sn
-
- def __hash__(self):
- return tuple(self.asn).__hash__() + self.cn.__hash__() + self.sn.__hash__()
-
- def __str__(self):
- return "%s: %s,%s: %s" % (self.asn, self.cn, self.sn, self.gski)
-
- @classmethod
- def parse(cls, yaml):
- return cls(yaml.get("asn"), yaml.get("router_id"))
-
-class allocation_db(list):
- """
- Representation of all the entities and allocations in the test
- system. Almost everything is generated out of this database.
- """
-
- def __init__(self, yaml):
- """
- Initialize database from the (first) YAML document.
"""
-
- list.__init__(self)
- self.root = allocation(yaml, self)
- assert self.root.is_root
- if self.root.crl_interval is None:
- self.root.crl_interval = rpki.sundial.timedelta.parse(cfg.get("crl_interval", "1d")).convert_to_seconds()
- if self.root.regen_margin is None:
- self.root.regen_margin = rpki.sundial.timedelta.parse(cfg.get("regen_margin", "1d")).convert_to_seconds()
- for a in self:
- if a.sia_base is None:
- a.sia_base = (rootd_sia + "root/trunk/" if a.is_root else a.parent.sia_base) + a.name + "/"
- if a.base.valid_until is None:
- a.base.valid_until = a.parent.base.valid_until
- if a.crl_interval is None:
- a.crl_interval = a.parent.crl_interval
- if a.regen_margin is None:
- a.regen_margin = a.parent.regen_margin
- a.client_handle = "/".join(a.sia_base.split("/")[4:]).rstrip("/")
- self.root.closure()
- self.map = dict((a.name, a) for a in self)
- self.engines = [a for a in self if a.is_engine]
- for i, a in enumerate(self.engines):
- a.set_engine_number(i)
- for a in self:
- if a.is_hosted:
- a.hosted_by = self.map[a.hosted_by]
- a.hosted_by.hosts.append(a)
- assert a.is_twig, "%s is not twig" % a.name
- assert not a.hosted_by.is_hosted, "%s is hosted by a hosted entity" % a.name
-
- def apply_delta(self, delta, cb):
+ Representation for a roa_request object.
"""
- Apply a delta or run a command.
- """
-
- def loop(iterator, d):
- if isinstance(d, str):
- c = d.split()
- cmds[c[0]](iterator, *c[1:])
- else:
- self.map[d["name"]].apply_delta(d, iterator)
- def done():
- self.root.closure()
- cb()
+ def __init__(self, asn, ipv4, ipv6):
+ self.asn = asn
+ self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None
+ self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None
- if delta is None:
- cb()
- else:
- rpki.async.iterator(delta, loop, done)
+ def __eq__(self, other):
+ return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6
- def dump(self):
- """
- Print content of the database.
- """
+ def __hash__(self):
+ v4 = tuple(self.v4) if self.v4 is not None else None
+ v6 = tuple(self.v6) if self.v6 is not None else None
+ return self.asn.__hash__() + v4.__hash__() + v6.__hash__()
- for a in self:
- print a
+ def __str__(self):
+ if self.v4 and self.v6: s = str(self.v4) + "," + str(self.v6)
+ elif self.v4: s = str(self.v4)
+ else: s = str(self.v6)
+ return "%s: %s" % (self.asn, s)
-class allocation(object):
-
- parent = None
- irdb_db_name = None
- irdb_port = None
- rpki_db_name = None
- rpki_port = None
- crl_interval = None
- regen_margin = None
- last_cms_time = None
- rpkid_process = None
- irdbd_process = None
-
- def __init__(self, yaml, db, parent = None):
- """
- Initialize one entity and insert it into the database.
- """
+ @classmethod
+ def parse(cls, yaml):
+ return cls(yaml.get("asn"), yaml.get("ipv4"), yaml.get("ipv6"))
- db.append(self)
- self.name = yaml["name"]
- self.parent = parent
- self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())]
- valid_until = None
- if "valid_until" in yaml:
- valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until"))
- if valid_until is None and "valid_for" in yaml:
- valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"])
- self.base = rpki.resource_set.resource_bag(
- asn = rpki.resource_set.resource_set_as(yaml.get("asn")),
- v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")),
- v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")),
- valid_until = valid_until)
- self.sia_base = yaml.get("sia_base")
- if "crl_interval" in yaml:
- self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds()
- if "regen_margin" in yaml:
- self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds()
- self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))]
- for r in self.roa_requests:
- if r.v4:
- self.base.v4 |= r.v4.to_resource_set()
- if r.v6:
- self.base.v6 |= r.v6.to_resource_set()
- self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())]
- for r in self.router_certs:
- self.base.asn |= r.asn
- self.hosted_by = yaml.get("hosted_by")
- self.extra_conf = yaml.get("extra_conf", [])
- self.hosts = []
-
- def closure(self):
+class router_cert(object):
"""
- Compute the transitive resource closure.
+ Representation for a router_cert object.
"""
- resources = self.base
- for kid in self.kids:
- resources |= kid.closure()
- self.resources = resources
- return resources
+ _ecparams = None
+ _keypair = None
+ _pkcs10 = None
+ _gski = None
+
+ @classmethod
+ def ecparams(cls):
+ if cls._ecparams is None:
+ cls._ecparams = rpki.x509.KeyParams.generateEC()
+ return cls._ecparams
+
+ def __init__(self, asn, router_id):
+ self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split()))
+ self.router_id = router_id
+ self.cn = "ROUTER-%08x" % self.asn[0].min
+ self.sn = "%08x" % self.router_id
+ self.eku = rpki.oids.id_kp_bgpsec_router
+
+ @property
+ def keypair(self):
+ if self._keypair is None:
+ self._keypair = rpki.x509.ECDSA.generate(self.ecparams())
+ return self._keypair
+
+ @property
+ def pkcs10(self):
+ if self._pkcs10 is None:
+ self._pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair)
+ return self._pkcs10
+
+ @property
+ def gski(self):
+ if self._gski is None:
+ self._gski = self.pkcs10.gSKI()
+ return self._gski
+
+ def __eq__(self, other):
+ return self.asn == other.asn and self.sn == other.sn
+
+ def __hash__(self):
+ return tuple(self.asn).__hash__() + self.cn.__hash__() + self.sn.__hash__()
+
+ def __str__(self):
+ return "%s: %s,%s: %s" % (self.asn, self.cn, self.sn, self.gski)
+
+ @classmethod
+ def parse(cls, yaml):
+ return cls(yaml.get("asn"), yaml.get("router_id"))
- def apply_delta(self, yaml, cb):
+class allocation_db(list):
"""
- Apply deltas to this entity.
+ Representation of all the entities and allocations in the test
+ system. Almost everything is generated out of this database.
"""
- logger.info("Applying delta: %s", yaml)
-
- def loop(iterator, kv):
- if kv[0] == "name":
- iterator()
- else:
- getattr(self, "apply_" + kv[0])(kv[1], iterator)
-
- rpki.async.iterator(yaml.items(), loop, cb)
-
- def apply_add_as(self, text, cb):
- self.base.asn |= rpki.resource_set.resource_set_as(text)
- cb()
-
- def apply_add_v4(self, text, cb):
- self.base.v4 |= rpki.resource_set.resource_set_ipv4(text)
- cb()
-
- def apply_add_v6(self, text, cb):
- self.base.v6 |= rpki.resource_set.resource_set_ipv6(text)
- cb()
-
- def apply_sub_as(self, text, cb):
- self.base.asn |= rpki.resource_set.resource_set_as(text)
- cb()
+ def __init__(self, yaml):
+ """
+ Initialize database from the (first) YAML document.
+ """
+
+ list.__init__(self)
+ self.root = allocation(yaml, self)
+ assert self.root.is_root
+ if self.root.crl_interval is None:
+ self.root.crl_interval = rpki.sundial.timedelta.parse(cfg.get("crl_interval", "1d")).convert_to_seconds()
+ if self.root.regen_margin is None:
+ self.root.regen_margin = rpki.sundial.timedelta.parse(cfg.get("regen_margin", "1d")).convert_to_seconds()
+ for a in self:
+ if a.sia_base is None:
+ a.sia_base = (rootd_sia + "root/trunk/" if a.is_root else a.parent.sia_base) + a.name + "/"
+ if a.base.valid_until is None:
+ a.base.valid_until = a.parent.base.valid_until
+ if a.crl_interval is None:
+ a.crl_interval = a.parent.crl_interval
+ if a.regen_margin is None:
+ a.regen_margin = a.parent.regen_margin
+ a.client_handle = "/".join(a.sia_base.split("/")[4:]).rstrip("/")
+ self.root.closure()
+ self.map = dict((a.name, a) for a in self)
+ self.engines = [a for a in self if a.is_engine]
+ for i, a in enumerate(self.engines):
+ a.set_engine_number(i)
+ for a in self:
+ if a.is_hosted:
+ a.hosted_by = self.map[a.hosted_by]
+ a.hosted_by.hosts.append(a)
+ assert a.is_twig, "%s is not twig" % a.name
+ assert not a.hosted_by.is_hosted, "%s is hosted by a hosted entity" % a.name
+
+ def apply_delta(self, delta, cb):
+ """
+ Apply a delta or run a command.
+ """
+
+ def loop(iterator, d):
+ if isinstance(d, str):
+ c = d.split()
+ cmds[c[0]](iterator, *c[1:])
+ else:
+ self.map[d["name"]].apply_delta(d, iterator)
+
+ def done():
+ self.root.closure()
+ cb()
+
+ if delta is None:
+ cb()
+ else:
+ rpki.async.iterator(delta, loop, done)
- def apply_sub_v4(self, text, cb):
- self.base.v4 |= rpki.resource_set.resource_set_ipv4(text)
- cb()
+ def dump(self):
+ """
+ Print content of the database.
+ """
- def apply_sub_v6(self, text, cb):
- self.base.v6 |= rpki.resource_set.resource_set_ipv6(text)
- cb()
+ for a in self:
+ print a
- def apply_valid_until(self, stamp, cb):
- self.base.valid_until = rpki.sundial.datetime.from_datetime(stamp)
- cb()
+class allocation(object):
- def apply_valid_for(self, text, cb):
- self.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(text)
- cb()
+ parent = None
+ irdb_db_name = None
+ irdb_port = None
+ rpki_db_name = None
+ rpki_port = None
+ crl_interval = None
+ regen_margin = None
+ last_cms_time = None
+ rpkid_process = None
+ irdbd_process = None
+
+ def __init__(self, yaml, db, parent = None):
+ """
+ Initialize one entity and insert it into the database.
+ """
+
+ db.append(self)
+ self.name = yaml["name"]
+ self.parent = parent
+ self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())]
+ valid_until = None
+ if "valid_until" in yaml:
+ valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until"))
+ if valid_until is None and "valid_for" in yaml:
+ valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"])
+ self.base = rpki.resource_set.resource_bag(
+ asn = rpki.resource_set.resource_set_as(yaml.get("asn")),
+ v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")),
+ v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")),
+ valid_until = valid_until)
+ self.sia_base = yaml.get("sia_base")
+ if "crl_interval" in yaml:
+ self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds()
+ if "regen_margin" in yaml:
+ self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds()
+ self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))]
+ for r in self.roa_requests:
+ if r.v4:
+ self.base.v4 |= r.v4.to_resource_set()
+ if r.v6:
+ self.base.v6 |= r.v6.to_resource_set()
+ self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())]
+ for r in self.router_certs:
+ self.base.asn |= r.asn
+ self.hosted_by = yaml.get("hosted_by")
+ self.extra_conf = yaml.get("extra_conf", [])
+ self.hosts = []
+
+ def closure(self):
+ """
+ Compute the transitive resource closure.
+ """
+
+ resources = self.base
+ for kid in self.kids:
+ resources |= kid.closure()
+ self.resources = resources
+ return resources
+
+ def apply_delta(self, yaml, cb):
+ """
+ Apply deltas to this entity.
+ """
+
+ logger.info("Applying delta: %s", yaml)
+
+ def loop(iterator, kv):
+ if kv[0] == "name":
+ iterator()
+ else:
+ getattr(self, "apply_" + kv[0])(kv[1], iterator)
+
+ rpki.async.iterator(yaml.items(), loop, cb)
+
+ def apply_add_as(self, text, cb):
+ self.base.asn |= rpki.resource_set.resource_set_as(text)
+ cb()
+
+ def apply_add_v4(self, text, cb):
+ self.base.v4 |= rpki.resource_set.resource_set_ipv4(text)
+ cb()
+
+ def apply_add_v6(self, text, cb):
+ self.base.v6 |= rpki.resource_set.resource_set_ipv6(text)
+ cb()
+
+ def apply_sub_as(self, text, cb):
+ self.base.asn |= rpki.resource_set.resource_set_as(text)
+ cb()
+
+ def apply_sub_v4(self, text, cb):
+ self.base.v4 |= rpki.resource_set.resource_set_ipv4(text)
+ cb()
+
+ def apply_sub_v6(self, text, cb):
+ self.base.v6 |= rpki.resource_set.resource_set_ipv6(text)
+ cb()
+
+ def apply_valid_until(self, stamp, cb):
+ self.base.valid_until = rpki.sundial.datetime.from_datetime(stamp)
+ cb()
+
+ def apply_valid_for(self, text, cb):
+ self.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(text)
+ cb()
+
+ def apply_valid_add(self, text, cb):
+ self.base.valid_until += rpki.sundial.timedelta.parse(text)
+ cb()
+
+ def apply_valid_sub(self, text, cb):
+ self.base.valid_until -= rpki.sundial.timedelta.parse(text)
+ cb()
+
+ def apply_roa_request_add(self, yaml, cb):
+ for y in yaml:
+ r = roa_request.parse(y)
+ if r not in self.roa_requests:
+ self.roa_requests.append(r)
+ cb()
+
+ def apply_roa_request_del(self, yaml, cb):
+ for y in yaml:
+ r = roa_request.parse(y)
+ if r in self.roa_requests:
+ self.roa_requests.remove(r)
+ cb()
+
+ def apply_router_cert_add(self, yaml, cb):
+ for y in yaml:
+ r = router_cert.parse(y)
+ if r not in self.router_certs:
+ self.router_certs.append(r)
+ cb()
+
+ def apply_router_cert_del(self, yaml, cb):
+ for y in yaml:
+ r = router_cert.parse(y)
+ if r in self.router_certs:
+ self.router_certs.remove(r)
+ cb()
+
+ def apply_rekey(self, target, cb):
+
+ def done(e):
+ if isinstance(e, Exception):
+ logger.exception("Exception while rekeying %s", self.name)
+ raise e
+ cb()
+
+ if target is None:
+ logger.info("Rekeying <tenant/> %s", self.name)
+ self.call_rpkid([rpki.left_right.self_elt.make_pdu(
+ action = "set", self_handle = self.name, rekey = "yes")], cb = done)
+ else:
+ logger.info("Rekeying <parent/> %s %s", self.name, target)
+ self.call_rpkid([rpki.left_right.parent_elt.make_pdu(
+ action = "set", self_handle = self.name, parent_handle = target, rekey = "yes")], cb = done)
+
+ def apply_revoke(self, target, cb):
+
+ def done(e):
+ if isinstance(e, Exception):
+ logger.exception("Exception while revoking %s", self.name)
+ raise e
+ cb()
+
+ if target is None:
+ logger.info("Revoking <tenant/> %s", self.name)
+ self.call_rpkid([rpki.left_right.self_elt.make_pdu(
+ action = "set", self_handle = self.name, revoke = "yes")], cb = done)
+ else:
+ logger.info("Revoking <parent/> %s %s", self.name, target)
+ self.call_rpkid([rpki.left_right.parent_elt.make_pdu(
+ action = "set", self_handle = self.name, parent_handle = target, revoke = "yes")], cb = done)
+
+ def __str__(self):
+ s = self.name + "\n"
+ if self.resources.asn: s += " ASN: %s\n" % self.resources.asn
+ if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4
+ if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6
+ if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids)
+ if self.parent: s += " Up: %s\n" % self.parent.name
+ if self.sia_base: s += " SIA: %s\n" % self.sia_base
+ return s + "Until: %s\n" % self.resources.valid_until
+
+
+ @property
+ def is_root(self):
+ return self.parent is None
+
+ @property
+ def is_twig(self):
+ return not self.is_root
+
+ @property
+ def is_hosted(self):
+ return self.hosted_by is not None
+
+ @property
+ def is_engine(self):
+ return not self.is_hosted
+
+ def set_engine_number(self, n):
+ """
+ Set the engine number for this entity.
+ """
+
+ self.irdb_db_name = "irdb%d" % n
+ self.irdb_port = allocate_port()
+ self.rpki_db_name = "rpki%d" % n
+ self.rpki_port = allocate_port()
+
+ def get_rpki_port(self):
+ """
+ Get rpki port to use for this entity.
+ """
+
+ if self.is_hosted:
+ assert self.hosted_by.rpki_port is not None
+ return self.hosted_by.rpki_port
+ else:
+ assert self.rpki_port is not None
+ return self.rpki_port
+
+ def setup_bpki_certs(self):
+ """
+ Create BPKI certificates for this entity.
+ """
+
+ logger.info("Constructing BPKI keys and certs for %s", self.name)
+ setup_bpki_cert_chain(name = self.name,
+ ee = ("RPKI", "IRDB", "IRBE"),
+ ca = ("SELF",))
+ self.rpkid_ta = rpki.x509.X509(PEM_file = self.name + "-TA.cer")
+ self.irbe_key = rpki.x509.RSA( PEM_file = self.name + "-IRBE.key")
+ self.irbe_cert = rpki.x509.X509(PEM_file = self.name + "-IRBE.cer")
+ self.rpkid_cert = rpki.x509.X509(PEM_file = self.name + "-RPKI.cer")
+
+ def setup_conf_file(self):
+ """
+ Write config files for this entity.
+ """
+
+ logger.info("Writing config files for %s", self.name)
+ assert self.rpki_port is not None
+ d = dict(my_name = self.name,
+ irdb_db_name = self.irdb_db_name,
+ irdb_db_pass = irdb_db_pass,
+ irdb_port = self.irdb_port,
+ rpki_db_name = self.rpki_db_name,
+ rpki_db_pass = rpki_db_pass,
+ rpki_port = self.rpki_port)
+ f = open(self.name + ".conf", "w")
+ f.write(conf_fmt_1 % d)
+ for line in self.extra_conf:
+ f.write(line + "\n")
+ f.close()
+
+ def setup_sql(self, rpki_sql, irdb_sql):
+ """
+ Set up this entity's IRDB.
+ """
+
+ logger.info("Setting up MySQL for %s", self.name)
+ db = MySQLdb.connect(user = "rpki", db = self.rpki_db_name, passwd = rpki_db_pass,
+ conv = sql_conversions)
+ cur = db.cursor()
+ db.autocommit(True)
+ for sql in rpki_sql:
+ try:
+ cur.execute(sql)
+ except:
+ if "DROP TABLE IF EXISTS" not in sql.upper():
+ raise
+ db.close()
+ db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass,
+ conv = sql_conversions)
+ cur = db.cursor()
+ db.autocommit(True)
+ for sql in irdb_sql:
+ try:
+ cur.execute(sql)
+ except:
+ if "DROP TABLE IF EXISTS" not in sql.upper():
+ raise
+ for s in [self] + self.hosts:
+ for kid in s.kids:
+ cur.execute("INSERT registrant (registrant_handle, registry_handle, valid_until) VALUES (%s, %s, %s)",
+ (kid.name, s.name, kid.resources.valid_until))
+ db.close()
+
+ def sync_sql(self):
+ """
+ Whack this entity's IRDB to match our master database. We do this
+ once during setup, then do it again every time we apply a delta to
+ this entity.
+ """
+
+ logger.info("Updating MySQL data for IRDB %s", self.name)
+ db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass,
+ conv = sql_conversions)
+ cur = db.cursor()
+ db.autocommit(True)
+ cur.execute("DELETE FROM registrant_asn")
+ cur.execute("DELETE FROM registrant_net")
+ cur.execute("DELETE FROM roa_request_prefix")
+ cur.execute("DELETE FROM roa_request")
+ cur.execute("DELETE FROM ee_certificate_asn")
+ cur.execute("DELETE FROM ee_certificate_net")
+ cur.execute("DELETE FROM ee_certificate")
+
+ for s in [self] + self.hosts:
+ for kid in s.kids:
+ cur.execute("SELECT registrant_id FROM registrant WHERE registrant_handle = %s AND registry_handle = %s",
+ (kid.name, s.name))
+ registrant_id = cur.fetchone()[0]
+ for as_range in kid.resources.asn:
+ cur.execute("INSERT registrant_asn (start_as, end_as, registrant_id) VALUES (%s, %s, %s)",
+ (as_range.min, as_range.max, registrant_id))
+ for v4_range in kid.resources.v4:
+ cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 4, %s)",
+ (v4_range.min, v4_range.max, registrant_id))
+ for v6_range in kid.resources.v6:
+ cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 6, %s)",
+ (v6_range.min, v6_range.max, registrant_id))
+ cur.execute("UPDATE registrant SET valid_until = %s WHERE registrant_id = %s",
+ (kid.resources.valid_until, registrant_id))
+ for r in s.roa_requests:
+ cur.execute("INSERT roa_request (self_handle, asn) VALUES (%s, %s)",
+ (s.name, r.asn))
+ roa_request_id = cur.lastrowid
+ for version, prefix_set in ((4, r.v4), (6, r.v6)):
+ if prefix_set:
+ cur.executemany("INSERT roa_request_prefix "
+ "(roa_request_id, prefix, prefixlen, max_prefixlen, version) "
+ "VALUES (%s, %s, %s, %s, %s)",
+ ((roa_request_id, x.prefix, x.prefixlen, x.max_prefixlen, version)
+ for x in prefix_set))
+ for r in s.router_certs:
+ cur.execute("INSERT ee_certificate (self_handle, pkcs10, gski, cn, sn, eku, valid_until) "
+ "VALUES (%s, %s, %s, %s, %s, %s, %s)",
+ (s.name, r.pkcs10.get_DER(), r.gski, r.cn, r.sn, r.eku, s.resources.valid_until))
+ ee_certificate_id = cur.lastrowid
+ cur.executemany("INSERT ee_certificate_asn (ee_certificate_id, start_as, end_as) VALUES (%s, %s, %s)",
+ ((ee_certificate_id, a.min, a.max) for a in r.asn))
+ db.close()
+
+ def run_daemons(self):
+ """
+ Run daemons for this entity.
+ """
+
+ logger.info("Running daemons for %s", self.name)
+ env = dict(os.environ, RPKI_CONF = self.name + ".conf")
+ self.rpkid_process = subprocess.Popen((prog_python, prog_rpkid, "--foreground", "--log-stdout", "--log-level", "debug") +
+ (("--profile", self.name + ".prof") if args.profile else ()),
+ env = env)
+ self.irdbd_process = subprocess.Popen((prog_python, prog_irdbd, "--foreground", "--log-stdout", "--log-level", "debug"),
+ env = env)
+
+ def kill_daemons(self):
+ """
+ Kill daemons for this entity.
+ """
+
+ # pylint: disable=E1103
+ for proc, name in ((self.rpkid_process, "rpkid"),
+ (self.irdbd_process, "irdbd")):
+ if proc is not None and proc.poll() is None:
+ logger.info("Killing daemon %s pid %s for %s", name, proc.pid, self.name)
+ try:
+ proc.terminate()
+ except OSError:
+ pass
+ if proc is not None:
+ logger.info("Daemon %s pid %s for %s exited with code %s",
+ name, proc.pid, self.name, proc.wait())
+
+ def call_rpkid(self, pdus, cb):
+ """
+ Send a left-right message to this entity's RPKI daemon and return
+ the response.
+
+ If this entity is hosted (does not run its own RPKI daemon), all
+ of this happens with the hosting RPKI daemon.
+ """
+
+ logger.info("Calling rpkid for %s", self.name)
+
+ if self.is_hosted:
+ logger.info("rpkid %s is hosted by rpkid %s, switching", self.name, self.hosted_by.name)
+ self = self.hosted_by
+ assert not self.is_hosted
+
+ assert isinstance(pdus, (list, tuple))
+ assert self.rpki_port is not None
+
+ q_msg = rpki.left_right.msg.query(*pdus)
+ q_cms = rpki.left_right.cms_msg_saxify()
+ q_der = q_cms.wrap(q_msg, self.irbe_key, self.irbe_cert)
+ q_url = "http://localhost:%d/left-right" % self.rpki_port
+
+ logger.debug(q_cms.pretty_print_content())
+
+ def done(r_der):
+ logger.info("Callback from rpkid %s", self.name)
+ r_cms = rpki.left_right.cms_msg_saxify(DER = r_der)
+ r_msg = r_cms.unwrap((self.rpkid_ta, self.rpkid_cert))
+ self.last_cms_time = r_cms.check_replay(self.last_cms_time, q_url)
+ logger.debug(r_cms.pretty_print_content())
+ assert r_msg.is_reply
+ for r_pdu in r_msg:
+ assert not isinstance(r_pdu, rpki.left_right.report_error_elt)
+ cb(r_msg)
+
+ def lose(e):
+ raise
+
+ rpki.http.client(
+ url = q_url,
+ msg = q_der,
+ callback = done,
+ errback = lose)
+
+ def cross_certify(self, certificant, reverse = False):
+ """
+ Cross-certify and return the resulting certificate.
+ """
+
+ if reverse:
+ certifier = certificant
+ certificant = self.name + "-SELF"
+ else:
+ certifier = self.name + "-SELF"
+ return cross_certify(certificant, certifier)
+
+ def create_rpki_objects(self, cb):
+ """
+ Create RPKI engine objects for this engine.
+
+ Root node of the engine tree is special, it too has a parent but
+ that one is the magic self-signed micro engine.
+
+ The rest of this is straightforward. There are a lot of objects
+ to create, but we can do batch them all into one honking PDU, then
+ issue one more PDU to set BSC EE certificates based on the PKCS
+ #10 requests we get back when we tell rpkid to generate BSC keys.
+ """
+
+ assert not self.is_hosted
+
+ selves = [self] + self.hosts
+
+ rpkid_pdus = []
+ pubd_pdus = []
+
+ for i, s in enumerate(selves):
+ logger.info("Creating RPKI objects for [%d] %s", i, s.name)
+
+ rpkid_pdus.append(rpki.left_right.self_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ crl_interval = s.crl_interval,
+ regen_margin = s.regen_margin,
+ bpki_cert = (s.cross_certify(s.hosted_by.name + "-TA", reverse = True)
+ if s.is_hosted else
+ rpki.x509.X509(Auto_file = s.name + "-SELF.cer"))))
+
+ rpkid_pdus.append(rpki.left_right.bsc_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ bsc_handle = "b",
+ generate_keypair = True))
+
+ pubd_pdus.append(rpki.publication_control.client_elt.make_pdu(
+ action = "create",
+ client_handle = s.client_handle,
+ base_uri = s.sia_base,
+ bpki_cert = s.cross_certify(pubd_name + "-TA", reverse = True)))
+
+ rpkid_pdus.append(rpki.left_right.repository_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ bsc_handle = "b",
+ repository_handle = "r",
+ bpki_cert = s.cross_certify(pubd_name + "-TA"),
+ peer_contact_uri = "http://localhost:%d/client/%s" % (pubd_port, s.client_handle)))
+
+ for k in s.kids:
+ rpkid_pdus.append(rpki.left_right.child_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ child_handle = k.name,
+ bsc_handle = "b",
+ bpki_cert = s.cross_certify(k.name + "-SELF")))
+
+ if s.is_root:
+ rootd_cert = s.cross_certify(rootd_name + "-TA")
+ rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ parent_handle = "rootd",
+ bsc_handle = "b",
+ repository_handle = "r",
+ sia_base = s.sia_base,
+ bpki_cert = rootd_cert,
+ sender_name = s.name,
+ recipient_name = "rootd",
+ peer_contact_uri = "http://localhost:%s/" % rootd_port))
+ else:
+ rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ parent_handle = s.parent.name,
+ bsc_handle = "b",
+ repository_handle = "r",
+ sia_base = s.sia_base,
+ bpki_cert = s.cross_certify(s.parent.name + "-SELF"),
+ sender_name = s.name,
+ recipient_name = s.parent.name,
+ peer_contact_uri = "http://localhost:%s/up-down/%s/%s" % (s.parent.get_rpki_port(),
+ s.parent.name, s.name)))
+
+ def one():
+ call_pubd(pubd_pdus, cb = two)
+
+ def two(vals):
+ self.call_rpkid(rpkid_pdus, cb = three)
+
+ def three(vals):
+
+ bsc_dict = dict((b.self_handle, b) for b in vals if isinstance(b, rpki.left_right.bsc_elt))
+
+ bsc_pdus = []
+
+ for s in selves:
+ b = bsc_dict[s.name]
+
+ logger.info("Issuing BSC EE cert for %s", s.name)
+ cmd = (prog_openssl, "x509", "-req", "-sha256", "-extfile", s.name + "-RPKI.conf",
+ "-extensions", "req_x509_ext", "-days", "30",
+ "-CA", s.name + "-SELF.cer", "-CAkey", s.name + "-SELF.key", "-CAcreateserial", "-text")
+ signer = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
+ signed = signer.communicate(input = b.pkcs10_request.get_PEM())
+ if not signed[0]:
+ logger.warning(signed[1])
+ raise CouldntIssueBSCEECertificate("Couldn't issue BSC EE certificate")
+ s.bsc_ee = rpki.x509.X509(PEM = signed[0])
+ s.bsc_crl = rpki.x509.CRL(PEM_file = s.name + "-SELF.crl")
+ logger.info("BSC EE cert for %s SKI %s", s.name, s.bsc_ee.hSKI())
+
+ bsc_pdus.append(rpki.left_right.bsc_elt.make_pdu(
+ action = "set",
+ self_handle = s.name,
+ bsc_handle = "b",
+ signing_cert = s.bsc_ee,
+ signing_cert_crl = s.bsc_crl))
+
+ self.call_rpkid(bsc_pdus, cb = four)
+
+ def four(vals):
+ cb()
+
+ one()
+
+ def setup_yaml_leaf(self):
+ """
+ Generate certificates and write YAML scripts for leaf nodes.
+
+ We're cheating a bit here: properly speaking, we can't generate
+ issue or revoke requests without knowing the class, which is
+ generated on the fly, but at the moment the test case is
+ simplistic enough that the class will always be "1", so we just
+ wire in that value for now.
+
+ Well, ok, we just broke that assumption. Now we do something even
+ nastier, just to eke a bit more life out of this kludge. This
+ really needs to be rewritten, but it may require a different tool
+ than testpoke.
+ """
+
+ if not os.path.exists(self.name + ".key"):
+ logger.info("Generating RPKI key for %s", self.name)
+ subprocess.check_call((prog_openssl, "genrsa", "-out", self.name + ".key", "2048" ),
+ stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
+ ski = rpki.x509.RSA(PEM_file = self.name + ".key").gSKI()
+
+ if self.parent.is_hosted:
+ parent_host = self.parent.hosted_by.name
+ else:
+ parent_host = self.parent.name
- def apply_valid_add(self, text, cb):
- self.base.valid_until += rpki.sundial.timedelta.parse(text)
- cb()
+ self.cross_certify(self.parent.name + "-SELF")
+ self.cross_certify(parent_host + "-TA")
- def apply_valid_sub(self, text, cb):
- self.base.valid_until -= rpki.sundial.timedelta.parse(text)
- cb()
+ def run_cron(self, cb):
+ """
+ Trigger cron run for this engine.
+ """
- def apply_roa_request_add(self, yaml, cb):
- for y in yaml:
- r = roa_request.parse(y)
- if r not in self.roa_requests:
- self.roa_requests.append(r)
- cb()
+ logger.info("Running cron for %s", self.name)
- def apply_roa_request_del(self, yaml, cb):
- for y in yaml:
- r = roa_request.parse(y)
- if r in self.roa_requests:
- self.roa_requests.remove(r)
- cb()
+ assert self.rpki_port is not None
- def apply_router_cert_add(self, yaml, cb):
- for y in yaml:
- r = router_cert.parse(y)
- if r not in self.router_certs:
- self.router_certs.append(r)
- cb()
+ def done(result):
+ assert result == "OK", 'Expected "OK" result from cronjob, got %r' % result
+ cb()
- def apply_router_cert_del(self, yaml, cb):
- for y in yaml:
- r = router_cert.parse(y)
- if r in self.router_certs:
- self.router_certs.remove(r)
- cb()
+ rpki.http.client(
+ url = "http://localhost:%d/cronjob" % self.rpki_port,
+ msg = "Run cron now, please",
+ callback = done,
+ errback = done)
- def apply_rekey(self, target, cb):
-
- def done(e):
- if isinstance(e, Exception):
- logger.exception("Exception while rekeying %s", self.name)
- raise e
- cb()
-
- if target is None:
- logger.info("Rekeying <tenant/> %s", self.name)
- self.call_rpkid([rpki.left_right.self_elt.make_pdu(
- action = "set", self_handle = self.name, rekey = "yes")], cb = done)
- else:
- logger.info("Rekeying <parent/> %s %s", self.name, target)
- self.call_rpkid([rpki.left_right.parent_elt.make_pdu(
- action = "set", self_handle = self.name, parent_handle = target, rekey = "yes")], cb = done)
-
- def apply_revoke(self, target, cb):
-
- def done(e):
- if isinstance(e, Exception):
- logger.exception("Exception while revoking %s", self.name)
- raise e
- cb()
-
- if target is None:
- logger.info("Revoking <tenant/> %s", self.name)
- self.call_rpkid([rpki.left_right.self_elt.make_pdu(
- action = "set", self_handle = self.name, revoke = "yes")], cb = done)
- else:
- logger.info("Revoking <parent/> %s %s", self.name, target)
- self.call_rpkid([rpki.left_right.parent_elt.make_pdu(
- action = "set", self_handle = self.name, parent_handle = target, revoke = "yes")], cb = done)
-
- def __str__(self):
- s = self.name + "\n"
- if self.resources.asn: s += " ASN: %s\n" % self.resources.asn
- if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4
- if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6
- if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids)
- if self.parent: s += " Up: %s\n" % self.parent.name
- if self.sia_base: s += " SIA: %s\n" % self.sia_base
- return s + "Until: %s\n" % self.resources.valid_until
-
-
- @property
- def is_root(self):
- return self.parent is None
-
- @property
- def is_twig(self):
- return not self.is_root
-
- @property
- def is_hosted(self):
- return self.hosted_by is not None
-
- @property
- def is_engine(self):
- return not self.is_hosted
-
- def set_engine_number(self, n):
- """
- Set the engine number for this entity.
- """
+ def run_yaml(self):
+ """
+ Run YAML scripts for this leaf entity. Since we're not bothering
+ to check the class list returned by the list command, the issue
+ command may fail, so we treat failure of the list command as an
+ error, but only issue a warning when issue fails.
+ """
- self.irdb_db_name = "irdb%d" % n
- self.irdb_port = allocate_port()
- self.rpki_db_name = "rpki%d" % n
- self.rpki_port = allocate_port()
+ logger.info("Running YAML for %s", self.name)
+ subprocess.check_call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "list"))
+ if subprocess.call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "issue")) != 0:
+ logger.warning("YAML issue command failed for %s, continuing", self.name)
- def get_rpki_port(self):
+def setup_bpki_cert_chain(name, ee = (), ca = ()):
"""
- Get rpki port to use for this entity.
+ Build a set of BPKI certificates.
"""
- if self.is_hosted:
- assert self.hosted_by.rpki_port is not None
- return self.hosted_by.rpki_port
- else:
- assert self.rpki_port is not None
- return self.rpki_port
+ s = "exec >/dev/null 2>&1\n"
+ #s = "set -x\n"
+ for kind in ("TA",) + ee + ca:
+ d = dict(name = name,
+ kind = kind,
+ ca = "false" if kind in ee else "true",
+ openssl = prog_openssl)
+ f = open("%(name)s-%(kind)s.conf" % d, "w")
+ f.write(bpki_cert_fmt_1 % d)
+ f.close()
+ if not os.path.exists("%(name)s-%(kind)s.key" % d):
+ s += bpki_cert_fmt_2 % d
+ s += bpki_cert_fmt_3 % d
+ d = dict(name = name,
+ openssl = prog_openssl)
+ s += bpki_cert_fmt_4 % d
+ for kind in ee + ca:
+ d["kind"] = kind
+ s += bpki_cert_fmt_5 % d
+ for kind in ("TA",) + ca:
+ d["kind"] = kind
+ s += bpki_cert_fmt_6 % d
+ subprocess.check_call(s, shell = True)
- def setup_bpki_certs(self):
+def setup_rootd(rpkid, rootd_yaml, db):
"""
- Create BPKI certificates for this entity.
+ Write the config files for rootd.
"""
- logger.info("Constructing BPKI keys and certs for %s", self.name)
- setup_bpki_cert_chain(name = self.name,
- ee = ("RPKI", "IRDB", "IRBE"),
- ca = ("SELF",))
- self.rpkid_ta = rpki.x509.X509(PEM_file = self.name + "-TA.cer")
- self.irbe_key = rpki.x509.RSA( PEM_file = self.name + "-IRBE.key")
- self.irbe_cert = rpki.x509.X509(PEM_file = self.name + "-IRBE.cer")
- self.rpkid_cert = rpki.x509.X509(PEM_file = self.name + "-RPKI.cer")
+ rpkid.cross_certify(rootd_name + "-TA", reverse = True)
+ cross_certify(pubd_name + "-TA", rootd_name + "-TA")
+ logger.info("Writing config files for %s", rootd_name)
+ d = dict(rootd_name = rootd_name,
+ rootd_port = rootd_port,
+ rpkid_name = rpkid.name,
+ pubd_name = pubd_name,
+ rootd_sia = rootd_sia,
+ rsyncd_dir = rsyncd_dir,
+ openssl = prog_openssl,
+ lifetime = rootd_yaml.get("lifetime", "30d"),
+ pubd_port = pubd_port,
+ rootd_handle = db.root.client_handle + "-" + rootd_name)
+ f = open(rootd_name + ".conf", "w")
+ f.write(rootd_fmt_1 % d)
+ f.close()
+ s = "exec >/dev/null 2>&1\n"
+ #s = "set -x\n"
+ if not os.path.exists("root.key"):
+ s += rootd_fmt_2 % d
+ s += rootd_fmt_3 % d
+ subprocess.check_call(s, shell = True)
- def setup_conf_file(self):
+def setup_rcynic():
"""
- Write config files for this entity.
+ Write the config file for rcynic.
"""
- logger.info("Writing config files for %s", self.name)
- assert self.rpki_port is not None
- d = dict(my_name = self.name,
- irdb_db_name = self.irdb_db_name,
- irdb_db_pass = irdb_db_pass,
- irdb_port = self.irdb_port,
- rpki_db_name = self.rpki_db_name,
- rpki_db_pass = rpki_db_pass,
- rpki_port = self.rpki_port)
- f = open(self.name + ".conf", "w")
- f.write(conf_fmt_1 % d)
- for line in self.extra_conf:
- f.write(line + "\n")
+ logger.info("Config file for rcynic")
+ d = dict(rcynic_name = rcynic_name,
+ rootd_name = rootd_name,
+ rootd_sia = rootd_sia)
+ f = open(rcynic_name + ".conf", "w")
+ f.write(rcynic_fmt_1 % d)
f.close()
- def setup_sql(self, rpki_sql, irdb_sql):
+def setup_rsyncd():
"""
- Set up this entity's IRDB.
+ Write the config file for rsyncd.
"""
- logger.info("Setting up MySQL for %s", self.name)
- db = MySQLdb.connect(user = "rpki", db = self.rpki_db_name, passwd = rpki_db_pass,
- conv = sql_conversions)
- cur = db.cursor()
- db.autocommit(True)
- for sql in rpki_sql:
- try:
- cur.execute(sql)
- except:
- if "DROP TABLE IF EXISTS" not in sql.upper():
- raise
- db.close()
- db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass,
- conv = sql_conversions)
- cur = db.cursor()
- db.autocommit(True)
- for sql in irdb_sql:
- try:
- cur.execute(sql)
- except:
- if "DROP TABLE IF EXISTS" not in sql.upper():
- raise
- for s in [self] + self.hosts:
- for kid in s.kids:
- cur.execute("INSERT registrant (registrant_handle, registry_handle, valid_until) VALUES (%s, %s, %s)",
- (kid.name, s.name, kid.resources.valid_until))
- db.close()
+ logger.info("Config file for rsyncd")
+ d = dict(rsyncd_name = rsyncd_name,
+ rsyncd_port = rsyncd_port,
+ rsyncd_module = rsyncd_module,
+ rsyncd_dir = rsyncd_dir)
+ f = open(rsyncd_name + ".conf", "w")
+ f.write(rsyncd_fmt_1 % d)
+ f.close()
- def sync_sql(self):
+def setup_publication(pubd_sql, irdb_db_name):
"""
- Whack this entity's IRDB to match our master database. We do this
- once during setup, then do it again every time we apply a delta to
- this entity.
+ Set up publication daemon.
"""
- logger.info("Updating MySQL data for IRDB %s", self.name)
- db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass,
+ logger.info("Configure publication daemon")
+ publication_dir = os.getcwd() + "/publication"
+ assert rootd_sia.startswith("rsync://")
+ global rsyncd_dir
+ rsyncd_dir = publication_dir + "/".join(rootd_sia.split("/")[4:])
+ if not rsyncd_dir.endswith("/"):
+ rsyncd_dir += "/"
+ os.makedirs(rsyncd_dir + "root/trunk")
+ db = MySQLdb.connect(db = pubd_db_name, user = pubd_db_user, passwd = pubd_db_pass,
conv = sql_conversions)
cur = db.cursor()
db.autocommit(True)
- cur.execute("DELETE FROM registrant_asn")
- cur.execute("DELETE FROM registrant_net")
- cur.execute("DELETE FROM roa_request_prefix")
- cur.execute("DELETE FROM roa_request")
- cur.execute("DELETE FROM ee_certificate_asn")
- cur.execute("DELETE FROM ee_certificate_net")
- cur.execute("DELETE FROM ee_certificate")
-
- for s in [self] + self.hosts:
- for kid in s.kids:
- cur.execute("SELECT registrant_id FROM registrant WHERE registrant_handle = %s AND registry_handle = %s",
- (kid.name, s.name))
- registrant_id = cur.fetchone()[0]
- for as_range in kid.resources.asn:
- cur.execute("INSERT registrant_asn (start_as, end_as, registrant_id) VALUES (%s, %s, %s)",
- (as_range.min, as_range.max, registrant_id))
- for v4_range in kid.resources.v4:
- cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 4, %s)",
- (v4_range.min, v4_range.max, registrant_id))
- for v6_range in kid.resources.v6:
- cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 6, %s)",
- (v6_range.min, v6_range.max, registrant_id))
- cur.execute("UPDATE registrant SET valid_until = %s WHERE registrant_id = %s",
- (kid.resources.valid_until, registrant_id))
- for r in s.roa_requests:
- cur.execute("INSERT roa_request (self_handle, asn) VALUES (%s, %s)",
- (s.name, r.asn))
- roa_request_id = cur.lastrowid
- for version, prefix_set in ((4, r.v4), (6, r.v6)):
- if prefix_set:
- cur.executemany("INSERT roa_request_prefix "
- "(roa_request_id, prefix, prefixlen, max_prefixlen, version) "
- "VALUES (%s, %s, %s, %s, %s)",
- ((roa_request_id, x.prefix, x.prefixlen, x.max_prefixlen, version)
- for x in prefix_set))
- for r in s.router_certs:
- cur.execute("INSERT ee_certificate (self_handle, pkcs10, gski, cn, sn, eku, valid_until) "
- "VALUES (%s, %s, %s, %s, %s, %s, %s)",
- (s.name, r.pkcs10.get_DER(), r.gski, r.cn, r.sn, r.eku, s.resources.valid_until))
- ee_certificate_id = cur.lastrowid
- cur.executemany("INSERT ee_certificate_asn (ee_certificate_id, start_as, end_as) VALUES (%s, %s, %s)",
- ((ee_certificate_id, a.min, a.max) for a in r.asn))
+ for sql in pubd_sql:
+ try:
+ cur.execute(sql)
+ except:
+ if "DROP TABLE IF EXISTS" not in sql.upper():
+ raise
db.close()
+ d = dict(pubd_name = pubd_name,
+ pubd_port = pubd_port,
+ pubd_db_name = pubd_db_name,
+ pubd_db_user = pubd_db_user,
+ pubd_db_pass = pubd_db_pass,
+ pubd_dir = rsyncd_dir,
+ irdb_db_name = irdb_db_name,
+ irdb_db_pass = irdb_db_pass)
+ f = open(pubd_name + ".conf", "w")
+ f.write(pubd_fmt_1 % d)
+ f.close()
+ global pubd_ta
+ global pubd_irbe_key
+ global pubd_irbe_cert
+ global pubd_pubd_cert
+ pubd_ta = rpki.x509.X509(Auto_file = pubd_name + "-TA.cer")
+ pubd_irbe_key = rpki.x509.RSA( Auto_file = pubd_name + "-IRBE.key")
+ pubd_irbe_cert = rpki.x509.X509(Auto_file = pubd_name + "-IRBE.cer")
+ pubd_pubd_cert = rpki.x509.X509(Auto_file = pubd_name + "-PUBD.cer")
- def run_daemons(self):
- """
- Run daemons for this entity.
- """
-
- logger.info("Running daemons for %s", self.name)
- env = dict(os.environ, RPKI_CONF = self.name + ".conf")
- self.rpkid_process = subprocess.Popen((prog_python, prog_rpkid, "--foreground", "--log-stdout", "--log-level", "debug") +
- (("--profile", self.name + ".prof") if args.profile else ()),
- env = env)
- self.irdbd_process = subprocess.Popen((prog_python, prog_irdbd, "--foreground", "--log-stdout", "--log-level", "debug"),
- env = env)
-
- def kill_daemons(self):
- """
- Kill daemons for this entity.
- """
-
- # pylint: disable=E1103
- for proc, name in ((self.rpkid_process, "rpkid"),
- (self.irdbd_process, "irdbd")):
- if proc is not None and proc.poll() is None:
- logger.info("Killing daemon %s pid %s for %s", name, proc.pid, self.name)
- try:
- proc.terminate()
- except OSError:
- pass
- if proc is not None:
- logger.info("Daemon %s pid %s for %s exited with code %s",
- name, proc.pid, self.name, proc.wait())
-
- def call_rpkid(self, pdus, cb):
+def call_pubd(pdus, cb):
"""
- Send a left-right message to this entity's RPKI daemon and return
+ Send a publication control message to publication daemon and return
the response.
-
- If this entity is hosted (does not run its own RPKI daemon), all
- of this happens with the hosting RPKI daemon.
"""
- logger.info("Calling rpkid for %s", self.name)
-
- if self.is_hosted:
- logger.info("rpkid %s is hosted by rpkid %s, switching", self.name, self.hosted_by.name)
- self = self.hosted_by
- assert not self.is_hosted
-
- assert isinstance(pdus, (list, tuple))
- assert self.rpki_port is not None
-
- q_msg = rpki.left_right.msg.query(*pdus)
- q_cms = rpki.left_right.cms_msg_saxify()
- q_der = q_cms.wrap(q_msg, self.irbe_key, self.irbe_cert)
- q_url = "http://localhost:%d/left-right" % self.rpki_port
+ logger.info("Calling pubd")
+ q_msg = rpki.publication_control.msg.query(*pdus)
+ q_cms = rpki.publication_control.cms_msg_saxify()
+ q_der = q_cms.wrap(q_msg, pubd_irbe_key, pubd_irbe_cert)
+ q_url = "http://localhost:%d/control" % pubd_port
logger.debug(q_cms.pretty_print_content())
- def done(r_der):
- logger.info("Callback from rpkid %s", self.name)
- r_cms = rpki.left_right.cms_msg_saxify(DER = r_der)
- r_msg = r_cms.unwrap((self.rpkid_ta, self.rpkid_cert))
- self.last_cms_time = r_cms.check_replay(self.last_cms_time, q_url)
- logger.debug(r_cms.pretty_print_content())
- assert r_msg.is_reply
- for r_pdu in r_msg:
- assert not isinstance(r_pdu, rpki.left_right.report_error_elt)
- cb(r_msg)
+ def call_pubd_cb(r_der):
+ global pubd_last_cms_time
+ r_cms = rpki.publication_control.cms_msg_saxify(DER = r_der)
+ r_msg = r_cms.unwrap((pubd_ta, pubd_pubd_cert))
+ pubd_last_cms_time = r_cms.check_replay(pubd_last_cms_time, q_url)
+ logger.debug(r_cms.pretty_print_content())
+ assert r_msg.is_reply
+ for r_pdu in r_msg:
+ r_pdu.raise_if_error()
+ cb(r_msg)
- def lose(e):
- raise
+ def call_pubd_eb(e):
+ logger.exception("Problem calling pubd")
rpki.http.client(
- url = q_url,
- msg = q_der,
- callback = done,
- errback = lose)
+ url = q_url,
+ msg = q_der,
+ callback = call_pubd_cb,
+ errback = call_pubd_eb)
+
- def cross_certify(self, certificant, reverse = False):
+def cross_certify(certificant, certifier):
"""
Cross-certify and return the resulting certificate.
"""
- if reverse:
- certifier = certificant
- certificant = self.name + "-SELF"
- else:
- certifier = self.name + "-SELF"
- return cross_certify(certificant, certifier)
-
- def create_rpki_objects(self, cb):
- """
- Create RPKI engine objects for this engine.
+ certfile = certifier + "-" + certificant + ".cer"
- Root node of the engine tree is special, it too has a parent but
- that one is the magic self-signed micro engine.
-
- The rest of this is straightforward. There are a lot of objects
- to create, but we can do batch them all into one honking PDU, then
- issue one more PDU to set BSC EE certificates based on the PKCS
- #10 requests we get back when we tell rpkid to generate BSC keys.
- """
+ logger.info("Cross certifying %s into %s's BPKI (%s)", certificant, certifier, certfile)
- assert not self.is_hosted
-
- selves = [self] + self.hosts
-
- rpkid_pdus = []
- pubd_pdus = []
-
- for i, s in enumerate(selves):
- logger.info("Creating RPKI objects for [%d] %s", i, s.name)
-
- rpkid_pdus.append(rpki.left_right.self_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- crl_interval = s.crl_interval,
- regen_margin = s.regen_margin,
- bpki_cert = (s.cross_certify(s.hosted_by.name + "-TA", reverse = True)
- if s.is_hosted else
- rpki.x509.X509(Auto_file = s.name + "-SELF.cer"))))
-
- rpkid_pdus.append(rpki.left_right.bsc_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- bsc_handle = "b",
- generate_keypair = True))
-
- pubd_pdus.append(rpki.publication_control.client_elt.make_pdu(
- action = "create",
- client_handle = s.client_handle,
- base_uri = s.sia_base,
- bpki_cert = s.cross_certify(pubd_name + "-TA", reverse = True)))
-
- rpkid_pdus.append(rpki.left_right.repository_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- bsc_handle = "b",
- repository_handle = "r",
- bpki_cert = s.cross_certify(pubd_name + "-TA"),
- peer_contact_uri = "http://localhost:%d/client/%s" % (pubd_port, s.client_handle)))
-
- for k in s.kids:
- rpkid_pdus.append(rpki.left_right.child_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- child_handle = k.name,
- bsc_handle = "b",
- bpki_cert = s.cross_certify(k.name + "-SELF")))
-
- if s.is_root:
- rootd_cert = s.cross_certify(rootd_name + "-TA")
- rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- parent_handle = "rootd",
- bsc_handle = "b",
- repository_handle = "r",
- sia_base = s.sia_base,
- bpki_cert = rootd_cert,
- sender_name = s.name,
- recipient_name = "rootd",
- peer_contact_uri = "http://localhost:%s/" % rootd_port))
- else:
- rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- parent_handle = s.parent.name,
- bsc_handle = "b",
- repository_handle = "r",
- sia_base = s.sia_base,
- bpki_cert = s.cross_certify(s.parent.name + "-SELF"),
- sender_name = s.name,
- recipient_name = s.parent.name,
- peer_contact_uri = "http://localhost:%s/up-down/%s/%s" % (s.parent.get_rpki_port(),
- s.parent.name, s.name)))
-
- def one():
- call_pubd(pubd_pdus, cb = two)
-
- def two(vals):
- self.call_rpkid(rpkid_pdus, cb = three)
-
- def three(vals):
-
- bsc_dict = dict((b.self_handle, b) for b in vals if isinstance(b, rpki.left_right.bsc_elt))
-
- bsc_pdus = []
-
- for s in selves:
- b = bsc_dict[s.name]
-
- logger.info("Issuing BSC EE cert for %s", s.name)
- cmd = (prog_openssl, "x509", "-req", "-sha256", "-extfile", s.name + "-RPKI.conf",
- "-extensions", "req_x509_ext", "-days", "30",
- "-CA", s.name + "-SELF.cer", "-CAkey", s.name + "-SELF.key", "-CAcreateserial", "-text")
- signer = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
- signed = signer.communicate(input = b.pkcs10_request.get_PEM())
- if not signed[0]:
- logger.warning(signed[1])
- raise CouldntIssueBSCEECertificate("Couldn't issue BSC EE certificate")
- s.bsc_ee = rpki.x509.X509(PEM = signed[0])
- s.bsc_crl = rpki.x509.CRL(PEM_file = s.name + "-SELF.crl")
- logger.info("BSC EE cert for %s SKI %s", s.name, s.bsc_ee.hSKI())
-
- bsc_pdus.append(rpki.left_right.bsc_elt.make_pdu(
- action = "set",
- self_handle = s.name,
- bsc_handle = "b",
- signing_cert = s.bsc_ee,
- signing_cert_crl = s.bsc_crl))
-
- self.call_rpkid(bsc_pdus, cb = four)
-
- def four(vals):
- cb()
-
- one()
-
- def setup_yaml_leaf(self):
- """
- Generate certificates and write YAML scripts for leaf nodes.
-
- We're cheating a bit here: properly speaking, we can't generate
- issue or revoke requests without knowing the class, which is
- generated on the fly, but at the moment the test case is
- simplistic enough that the class will always be "1", so we just
- wire in that value for now.
-
- Well, ok, we just broke that assumption. Now we do something even
- nastier, just to eke a bit more life out of this kludge. This
- really needs to be rewritten, but it may require a different tool
- than testpoke.
- """
+ child = rpki.x509.X509(Auto_file = certificant + ".cer")
+ parent = rpki.x509.X509(Auto_file = certifier + ".cer")
+ keypair = rpki.x509.RSA(Auto_file = certifier + ".key")
+ serial_file = certifier + ".srl"
- if not os.path.exists(self.name + ".key"):
- logger.info("Generating RPKI key for %s", self.name)
- subprocess.check_call((prog_openssl, "genrsa", "-out", self.name + ".key", "2048" ),
- stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
- ski = rpki.x509.RSA(PEM_file = self.name + ".key").gSKI()
+ now = rpki.sundial.now()
+ notAfter = now + rpki.sundial.timedelta(days = 30)
- if self.parent.is_hosted:
- parent_host = self.parent.hosted_by.name
- else:
- parent_host = self.parent.name
+ try:
+ with open(serial_file, "r") as f:
+ serial = int(f.read().splitlines()[0], 16)
+ except IOError:
+ serial = 1
- self.cross_certify(self.parent.name + "-SELF")
- self.cross_certify(parent_host + "-TA")
+ x = parent.bpki_cross_certify(
+ keypair = keypair,
+ source_cert = child,
+ serial = serial,
+ notAfter = notAfter,
+ now = now)
- def run_cron(self, cb):
- """
- Trigger cron run for this engine.
- """
+ with open(serial_file, "w") as f:
+ f.write("%02x\n" % (serial + 1))
- logger.info("Running cron for %s", self.name)
+ with open(certfile, "w") as f:
+ f.write(x.get_PEM())
- assert self.rpki_port is not None
+ logger.debug("Cross certified %s:", certfile)
+ logger.debug(" Issuer %s [%s]", x.getIssuer(), x.hAKI())
+ logger.debug(" Subject %s [%s]", x.getSubject(), x.hSKI())
+ return x
- def done(result):
- assert result == "OK", 'Expected "OK" result from cronjob, got %r' % result
- cb()
-
- rpki.http.client(
- url = "http://localhost:%d/cronjob" % self.rpki_port,
- msg = "Run cron now, please",
- callback = done,
- errback = done)
+last_rcynic_run = None
- def run_yaml(self):
+def run_rcynic():
"""
- Run YAML scripts for this leaf entity. Since we're not bothering
- to check the class list returned by the list command, the issue
- command may fail, so we treat failure of the list command as an
- error, but only issue a warning when issue fails.
+ Run rcynic to see whether what was published makes sense.
"""
- logger.info("Running YAML for %s", self.name)
- subprocess.check_call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "list"))
- if subprocess.call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "issue")) != 0:
- logger.warning("YAML issue command failed for %s, continuing", self.name)
+ logger.info("Running rcynic")
+ env = os.environ.copy()
+ env["TZ"] = ""
+ global last_rcynic_run
+ if int(time.time()) == last_rcynic_run:
+ time.sleep(1)
+ subprocess.check_call((prog_rcynic, "-c", rcynic_name + ".conf"), env = env)
+ subprocess.call(rcynic_stats, shell = True, env = env)
+ last_rcynic_run = int(time.time())
+ os.link("%s.xml" % rcynic_name, "%s.%s.xml" % (rcynic_name, last_rcynic_run))
-def setup_bpki_cert_chain(name, ee = (), ca = ()):
- """
- Build a set of BPKI certificates.
- """
+def mangle_sql(filename):
+ """
+ Mangle an SQL file into a sequence of SQL statements.
+ """
- s = "exec >/dev/null 2>&1\n"
- #s = "set -x\n"
- for kind in ("TA",) + ee + ca:
- d = dict(name = name,
- kind = kind,
- ca = "false" if kind in ee else "true",
- openssl = prog_openssl)
- f = open("%(name)s-%(kind)s.conf" % d, "w")
- f.write(bpki_cert_fmt_1 % d)
+ words = []
+ f = open(filename)
+ for line in f:
+ words.extend(line.partition("--")[0].split())
f.close()
- if not os.path.exists("%(name)s-%(kind)s.key" % d):
- s += bpki_cert_fmt_2 % d
- s += bpki_cert_fmt_3 % d
- d = dict(name = name,
- openssl = prog_openssl)
- s += bpki_cert_fmt_4 % d
- for kind in ee + ca:
- d["kind"] = kind
- s += bpki_cert_fmt_5 % d
- for kind in ("TA",) + ca:
- d["kind"] = kind
- s += bpki_cert_fmt_6 % d
- subprocess.check_call(s, shell = True)
-
-def setup_rootd(rpkid, rootd_yaml, db):
- """
- Write the config files for rootd.
- """
-
- rpkid.cross_certify(rootd_name + "-TA", reverse = True)
- cross_certify(pubd_name + "-TA", rootd_name + "-TA")
- logger.info("Writing config files for %s", rootd_name)
- d = dict(rootd_name = rootd_name,
- rootd_port = rootd_port,
- rpkid_name = rpkid.name,
- pubd_name = pubd_name,
- rootd_sia = rootd_sia,
- rsyncd_dir = rsyncd_dir,
- openssl = prog_openssl,
- lifetime = rootd_yaml.get("lifetime", "30d"),
- pubd_port = pubd_port,
- rootd_handle = db.root.client_handle + "-" + rootd_name)
- f = open(rootd_name + ".conf", "w")
- f.write(rootd_fmt_1 % d)
- f.close()
- s = "exec >/dev/null 2>&1\n"
- #s = "set -x\n"
- if not os.path.exists("root.key"):
- s += rootd_fmt_2 % d
- s += rootd_fmt_3 % d
- subprocess.check_call(s, shell = True)
-
-def setup_rcynic():
- """
- Write the config file for rcynic.
- """
-
- logger.info("Config file for rcynic")
- d = dict(rcynic_name = rcynic_name,
- rootd_name = rootd_name,
- rootd_sia = rootd_sia)
- f = open(rcynic_name + ".conf", "w")
- f.write(rcynic_fmt_1 % d)
- f.close()
-
-def setup_rsyncd():
- """
- Write the config file for rsyncd.
- """
-
- logger.info("Config file for rsyncd")
- d = dict(rsyncd_name = rsyncd_name,
- rsyncd_port = rsyncd_port,
- rsyncd_module = rsyncd_module,
- rsyncd_dir = rsyncd_dir)
- f = open(rsyncd_name + ".conf", "w")
- f.write(rsyncd_fmt_1 % d)
- f.close()
-
-def setup_publication(pubd_sql, irdb_db_name):
- """
- Set up publication daemon.
- """
-
- logger.info("Configure publication daemon")
- publication_dir = os.getcwd() + "/publication"
- assert rootd_sia.startswith("rsync://")
- global rsyncd_dir
- rsyncd_dir = publication_dir + "/".join(rootd_sia.split("/")[4:])
- if not rsyncd_dir.endswith("/"):
- rsyncd_dir += "/"
- os.makedirs(rsyncd_dir + "root/trunk")
- db = MySQLdb.connect(db = pubd_db_name, user = pubd_db_user, passwd = pubd_db_pass,
- conv = sql_conversions)
- cur = db.cursor()
- db.autocommit(True)
- for sql in pubd_sql:
- try:
- cur.execute(sql)
- except:
- if "DROP TABLE IF EXISTS" not in sql.upper():
- raise
- db.close()
- d = dict(pubd_name = pubd_name,
- pubd_port = pubd_port,
- pubd_db_name = pubd_db_name,
- pubd_db_user = pubd_db_user,
- pubd_db_pass = pubd_db_pass,
- pubd_dir = rsyncd_dir,
- irdb_db_name = irdb_db_name,
- irdb_db_pass = irdb_db_pass)
- f = open(pubd_name + ".conf", "w")
- f.write(pubd_fmt_1 % d)
- f.close()
- global pubd_ta
- global pubd_irbe_key
- global pubd_irbe_cert
- global pubd_pubd_cert
- pubd_ta = rpki.x509.X509(Auto_file = pubd_name + "-TA.cer")
- pubd_irbe_key = rpki.x509.RSA( Auto_file = pubd_name + "-IRBE.key")
- pubd_irbe_cert = rpki.x509.X509(Auto_file = pubd_name + "-IRBE.cer")
- pubd_pubd_cert = rpki.x509.X509(Auto_file = pubd_name + "-PUBD.cer")
-
-def call_pubd(pdus, cb):
- """
- Send a publication control message to publication daemon and return
- the response.
- """
-
- logger.info("Calling pubd")
- q_msg = rpki.publication_control.msg.query(*pdus)
- q_cms = rpki.publication_control.cms_msg_saxify()
- q_der = q_cms.wrap(q_msg, pubd_irbe_key, pubd_irbe_cert)
- q_url = "http://localhost:%d/control" % pubd_port
-
- logger.debug(q_cms.pretty_print_content())
-
- def call_pubd_cb(r_der):
- global pubd_last_cms_time
- r_cms = rpki.publication_control.cms_msg_saxify(DER = r_der)
- r_msg = r_cms.unwrap((pubd_ta, pubd_pubd_cert))
- pubd_last_cms_time = r_cms.check_replay(pubd_last_cms_time, q_url)
- logger.debug(r_cms.pretty_print_content())
- assert r_msg.is_reply
- for r_pdu in r_msg:
- r_pdu.raise_if_error()
- cb(r_msg)
-
- def call_pubd_eb(e):
- logger.exception("Problem calling pubd")
-
- rpki.http.client(
- url = q_url,
- msg = q_der,
- callback = call_pubd_cb,
- errback = call_pubd_eb)
-
-
-def cross_certify(certificant, certifier):
- """
- Cross-certify and return the resulting certificate.
- """
-
- certfile = certifier + "-" + certificant + ".cer"
-
- logger.info("Cross certifying %s into %s's BPKI (%s)", certificant, certifier, certfile)
-
- child = rpki.x509.X509(Auto_file = certificant + ".cer")
- parent = rpki.x509.X509(Auto_file = certifier + ".cer")
- keypair = rpki.x509.RSA(Auto_file = certifier + ".key")
- serial_file = certifier + ".srl"
-
- now = rpki.sundial.now()
- notAfter = now + rpki.sundial.timedelta(days = 30)
-
- try:
- with open(serial_file, "r") as f:
- serial = int(f.read().splitlines()[0], 16)
- except IOError:
- serial = 1
-
- x = parent.bpki_cross_certify(
- keypair = keypair,
- source_cert = child,
- serial = serial,
- notAfter = notAfter,
- now = now)
-
- with open(serial_file, "w") as f:
- f.write("%02x\n" % (serial + 1))
-
- with open(certfile, "w") as f:
- f.write(x.get_PEM())
-
- logger.debug("Cross certified %s:", certfile)
- logger.debug(" Issuer %s [%s]", x.getIssuer(), x.hAKI())
- logger.debug(" Subject %s [%s]", x.getSubject(), x.hSKI())
- return x
-
-last_rcynic_run = None
-
-def run_rcynic():
- """
- Run rcynic to see whether what was published makes sense.
- """
-
- logger.info("Running rcynic")
- env = os.environ.copy()
- env["TZ"] = ""
- global last_rcynic_run
- if int(time.time()) == last_rcynic_run:
- time.sleep(1)
- subprocess.check_call((prog_rcynic, "-c", rcynic_name + ".conf"), env = env)
- subprocess.call(rcynic_stats, shell = True, env = env)
- last_rcynic_run = int(time.time())
- os.link("%s.xml" % rcynic_name, "%s.%s.xml" % (rcynic_name, last_rcynic_run))
-
-def mangle_sql(filename):
- """
- Mangle an SQL file into a sequence of SQL statements.
- """
-
- words = []
- f = open(filename)
- for line in f:
- words.extend(line.partition("--")[0].split())
- f.close()
- return " ".join(words).strip(";").split(";")
+ return " ".join(words).strip(";").split(";")
bpki_cert_fmt_1 = '''\
[req]