Diffstat (limited to 'rpkid/tests')
-rw-r--r--   rpkid/tests/old_irdbd.py            21
-rw-r--r--   rpkid/tests/old_irdbd.sql          105
-rw-r--r--   rpkid/tests/smoketest.py            25
-rw-r--r--   rpkid/tests/sql-cleaner.py          33
-rw-r--r--   rpkid/tests/yamltest-test-all.sh    16
-rw-r--r--   rpkid/tests/yamltest.py            320
6 files changed, 315 insertions, 205 deletions
diff --git a/rpkid/tests/old_irdbd.py b/rpkid/tests/old_irdbd.py
new file mode 100644
index 00000000..3fa84b80
--- /dev/null
+++ b/rpkid/tests/old_irdbd.py
@@ -0,0 +1,21 @@
+"""
+$Id$
+
+Copyright (C) 2010-2012 Internet Systems Consortium ("ISC")
+
+Permission to use, copy, modify, and distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+"""
+
+if __name__ == "__main__":
+ import rpki.old_irdbd
+ rpki.old_irdbd.main()
diff --git a/rpkid/tests/old_irdbd.sql b/rpkid/tests/old_irdbd.sql
new file mode 100644
index 00000000..bf324cd8
--- /dev/null
+++ b/rpkid/tests/old_irdbd.sql
@@ -0,0 +1,105 @@
+-- $Id$
+
+-- Copyright (C) 2009--2011 Internet Systems Consortium ("ISC")
+--
+-- Permission to use, copy, modify, and distribute this software for any
+-- purpose with or without fee is hereby granted, provided that the above
+-- copyright notice and this permission notice appear in all copies.
+--
+-- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+-- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+-- PERFORMANCE OF THIS SOFTWARE.
+
+-- Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN")
+--
+-- Permission to use, copy, modify, and distribute this software for any
+-- purpose with or without fee is hereby granted, provided that the above
+-- copyright notice and this permission notice appear in all copies.
+--
+-- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH
+-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+-- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT,
+-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+-- PERFORMANCE OF THIS SOFTWARE.
+
+-- SQL objects needed by irdbd.py. You only need this if you're using
+-- irdbd.py as your IRDB; if you have a "real" backend you can do
+-- anything you like so long as you implement the relevant portion of
+-- the left-right protocol.
+
+-- DROP TABLE commands must be in correct (reverse dependency) order
+-- to satisfy FOREIGN KEY constraints.
+
+DROP TABLE IF EXISTS roa_request_prefix;
+DROP TABLE IF EXISTS roa_request;
+DROP TABLE IF EXISTS registrant_net;
+DROP TABLE IF EXISTS registrant_asn;
+DROP TABLE IF EXISTS registrant;
+DROP TABLE IF EXISTS ghostbuster_request;
+
+CREATE TABLE registrant (
+ registrant_id SERIAL NOT NULL,
+ registrant_handle VARCHAR(255) NOT NULL,
+ registrant_name TEXT,
+ registry_handle VARCHAR(255),
+ valid_until DATETIME NOT NULL,
+ PRIMARY KEY (registrant_id),
+ UNIQUE (registry_handle, registrant_handle)
+) ENGINE=InnoDB;
+
+CREATE TABLE registrant_asn (
+ registrant_asn_id SERIAL NOT NULL,
+ start_as BIGINT UNSIGNED NOT NULL,
+ end_as BIGINT UNSIGNED NOT NULL,
+ registrant_id BIGINT UNSIGNED NOT NULL,
+ PRIMARY KEY (registrant_asn_id),
+ CONSTRAINT registrant_asn_registrant_id
+ FOREIGN KEY (registrant_id) REFERENCES registrant (registrant_id) ON DELETE CASCADE
+) ENGINE=InnoDB;
+
+CREATE TABLE registrant_net (
+ registrant_net_id SERIAL NOT NULL,
+ start_ip VARCHAR(40) NOT NULL,
+ end_ip VARCHAR(40) NOT NULL,
+ version TINYINT UNSIGNED NOT NULL,
+ registrant_id BIGINT UNSIGNED NOT NULL,
+ PRIMARY KEY (registrant_net_id),
+ CONSTRAINT registrant_net_registrant_id
+ FOREIGN KEY (registrant_id) REFERENCES registrant (registrant_id) ON DELETE CASCADE
+) ENGINE=InnoDB;
+
+CREATE TABLE roa_request (
+ roa_request_id SERIAL NOT NULL,
+ roa_request_handle VARCHAR(255) NOT NULL,
+ asn BIGINT UNSIGNED NOT NULL,
+ PRIMARY KEY (roa_request_id)
+) ENGINE=InnoDB;
+
+CREATE TABLE roa_request_prefix (
+ prefix VARCHAR(40) NOT NULL,
+ prefixlen TINYINT UNSIGNED NOT NULL,
+ max_prefixlen TINYINT UNSIGNED NOT NULL,
+ version TINYINT UNSIGNED NOT NULL,
+ roa_request_id BIGINT UNSIGNED NOT NULL,
+ PRIMARY KEY (roa_request_id, prefix, prefixlen, max_prefixlen),
+ CONSTRAINT roa_request_prefix_roa_request_id
+ FOREIGN KEY (roa_request_id) REFERENCES roa_request (roa_request_id) ON DELETE CASCADE
+) ENGINE=InnoDB;
+
+CREATE TABLE ghostbuster_request (
+ ghostbuster_request_id SERIAL NOT NULL,
+ self_handle VARCHAR(40) NOT NULL,
+ parent_handle VARCHAR(40),
+ vcard LONGBLOB NOT NULL,
+ PRIMARY KEY (ghostbuster_request_id)
+) ENGINE=InnoDB;
+
+-- Local Variables:
+-- indent-tabs-mode: nil
+-- End:
diff --git a/rpkid/tests/smoketest.py b/rpkid/tests/smoketest.py
index 32f78726..3cb90d11 100644
--- a/rpkid/tests/smoketest.py
+++ b/rpkid/tests/smoketest.py
@@ -124,8 +124,8 @@ pubd_name = cfg.get("pubd_name", "pubd")
prog_python = cfg.get("prog_python", sys.executable)
prog_rpkid = cfg.get("prog_rpkid", "../../rpkid.py")
-prog_irdbd = cfg.get("prog_irdbd", "../../irdbd.py")
-prog_poke = cfg.get("prog_poke", "../../testpoke.py")
+prog_irdbd = cfg.get("prog_irdbd", "../old_irdbd.py")
+prog_poke = cfg.get("prog_poke", "../testpoke.py")
prog_rootd = cfg.get("prog_rootd", "../../rootd.py")
prog_pubd = cfg.get("prog_pubd", "../../pubd.py")
prog_rsyncd = cfg.get("prog_rsyncd", "rsync")
@@ -135,7 +135,7 @@ prog_openssl = cfg.get("prog_openssl", "../../../openssl/openssl/apps/openss
rcynic_stats = cfg.get("rcynic_stats", "echo ; ../../../rcynic/show.sh %s.xml ; echo" % rcynic_name)
rpki_sql_file = cfg.get("rpki_sql_file", "../rpkid.sql")
-irdb_sql_file = cfg.get("irdb_sql_file", "../irdbd.sql")
+irdb_sql_file = cfg.get("irdb_sql_file", "old_irdbd.sql")
pub_sql_file = cfg.get("pub_sql_file", "../pubd.sql")
startup_delay = int(cfg.get("startup_delay", "10"))
@@ -868,7 +868,12 @@ class allocation(object):
except IOError:
serial = 1
- x = parent.cross_certify(keypair, child, serial, notAfter, now)
+ x = parent.bpki_cross_certify(
+ keypair = keypair,
+ source_cert = child,
+ serial = serial,
+ notAfter = notAfter,
+ now = now)
f = open(serial_file, "w")
f.write("%02x\n" % (serial + 1))
@@ -1265,16 +1270,12 @@ def mangle_sql(filename):
"""
Mangle an SQL file into a sequence of SQL statements.
"""
-
- # There is no pretty way to do this. Just shut your eyes, it'll be
- # over soon.
-
+ words = []
f = open(filename)
- statements = " ".join(" ".join(word for word in line.expandtabs().split(" ") if word)
- for line in [line.strip(" \t\n") for line in f.readlines()]
- if line and not line.startswith("--")).rstrip(";").split(";")
+ for line in f:
+ words.extend(line.partition("--")[0].split())
f.close()
- return [stmt.strip() for stmt in statements]
+ return " ".join(words).strip(";").split(";")
bpki_cert_fmt_1 = '''\
[ req ]
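
The rewritten mangle_sql above drops the nested-generator one-liner in favour of a comment-stripping word accumulator. A self-contained sketch of the same idea (the per-statement strip and empty-statement filter are small additions here, not part of the diff):

def mangle_sql(filename):
  """
  Mangle an SQL file into a list of SQL statements: strip "--"
  comments, collapse whitespace, then split on semicolons.
  """
  words = []
  f = open(filename)
  for line in f:
    # Everything from "--" to end of line is a comment.
    words.extend(line.partition("--")[0].split())
  f.close()
  return [stmt.strip() for stmt in " ".join(words).strip(";").split(";") if stmt.strip()]

statements = mangle_sql("old_irdbd.sql")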
diff --git a/rpkid/tests/sql-cleaner.py b/rpkid/tests/sql-cleaner.py
index 5c772bc4..5db122e1 100644
--- a/rpkid/tests/sql-cleaner.py
+++ b/rpkid/tests/sql-cleaner.py
@@ -3,7 +3,7 @@
$Id$
-Copyright (C) 2009--2010 Internet Systems Consortium ("ISC")
+Copyright (C) 2009--2011 Internet Systems Consortium ("ISC")
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
@@ -18,7 +18,8 @@ OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
"""
-import subprocess, rpki.config
+import rpki.config, rpki.sql_schemas
+from rpki.mysql_import import MySQLdb
cfg = rpki.config.parser(None, "yamltest", allow_missing = True)
@@ -26,8 +27,30 @@ for name in ("rpkid", "irdbd", "pubd"):
username = cfg.get("%s_sql_username" % name, name[:4])
password = cfg.get("%s_sql_password" % name, "fnord")
+
+ schema = []
+ for line in getattr(rpki.sql_schemas, name, "").splitlines():
+ schema.extend(line.partition("--")[0].split())
+ schema = " ".join(schema).strip(";").split(";")
+ schema = [statement.strip() for statement in schema if statement and "DROP TABLE" not in statement]
for i in xrange(12):
- subprocess.check_call(
- ("mysql", "-u", username, "-p" + password, "%s%d" % (name[:4], i)),
- stdin = open("../%s.sql" % name))
+
+ database = "%s%d" % (name[:4], i)
+
+ db = MySQLdb.connect(user = username, db = database, passwd = password)
+ cur = db.cursor()
+
+ cur.execute("SHOW TABLES")
+ tables = [r[0] for r in cur.fetchall()]
+
+ cur.execute("SET foreign_key_checks = 0")
+ for table in tables:
+ cur.execute("DROP TABLE %s" % table)
+ cur.execute("SET foreign_key_checks = 1")
+
+ for statement in schema:
+ cur.execute(statement)
+
+ cur.close()
+ db.close()
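
The new sql-cleaner no longer pipes a .sql file through the mysql client; it drops every table with foreign key checks disabled and then replays the schema statement by statement. A condensed sketch of that reset step, assuming MySQLdb connection parameters like those used above:

import MySQLdb

def reset_database(username, password, database, schema_statements):
  """
  Drop every table in `database`, then replay `schema_statements`
  (CREATE TABLE statements with comments and DROP TABLEs already
  filtered out, e.g. by mangle_sql()).
  """
  db = MySQLdb.connect(user = username, passwd = password, db = database)
  cur = db.cursor()
  cur.execute("SHOW TABLES")
  tables = [row[0] for row in cur.fetchall()]
  # With foreign key checks off, drop order no longer matters.
  cur.execute("SET foreign_key_checks = 0")
  for table in tables:
    cur.execute("DROP TABLE %s" % table)
  cur.execute("SET foreign_key_checks = 1")
  for statement in schema_statements:
    cur.execute(statement)
  cur.close()
  db.close()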
diff --git a/rpkid/tests/yamltest-test-all.sh b/rpkid/tests/yamltest-test-all.sh
index f6a05237..46f3c59e 100644
--- a/rpkid/tests/yamltest-test-all.sh
+++ b/rpkid/tests/yamltest-test-all.sh
@@ -1,7 +1,7 @@
#!/bin/sh -
# $Id$
-# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC")
+# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC")
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
@@ -17,18 +17,18 @@
set -x
-export TZ=UTC MYRPKI_RNG=$(pwd)/myrpki.rng
+export TZ=UTC
test -z "$STY" && exec screen -L sh $0
screen -X split
screen -X focus
-runtime=$((30 * 60))
+: ${runtime=900}
for yaml in smoketest.*.yaml
do
- rm -rf test
+ rm -rf test rcynic-data
python sql-cleaner.py
screen python yamltest.py -p yamltest.pid $yaml
now=$(date +%s)
@@ -42,9 +42,13 @@ do
date
../../rcynic/rcynic
../../rcynic/show.sh
+ ../../utils/scan_roas/scan_roas rcynic-data/authenticated
date
done
- test -r yamltest.pid && kill -INT $(cat yamltest.pid)
- sleep 30
+ if test -r yamltest.pid
+ then
+ kill -INT $(cat yamltest.pid)
+ sleep 30
+ fi
make backup
done
diff --git a/rpkid/tests/yamltest.py b/rpkid/tests/yamltest.py
index ecd00af2..2d7e90d6 100644
--- a/rpkid/tests/yamltest.py
+++ b/rpkid/tests/yamltest.py
@@ -1,6 +1,6 @@
"""
Test framework, using the same YAML test description format as
-smoketest.py, but using the myrpki.py tool to do all the back-end
+smoketest.py, but using the rpkic.py tool to do all the back-end
work. Reads YAML file, generates .csv and .conf files, runs daemons
and waits for one of them to exit.
@@ -10,7 +10,7 @@ Still to do:
- Implement smoketest.py-style delta actions, that is, modify the
allocation database under control of the YAML file, dump out new
- .csv files, and run myrpki.py again to feed resulting changes into
+ .csv files, and run rpkic.py again to feed resulting changes into
running daemons.
$Id$
@@ -46,7 +46,8 @@ PERFORMANCE OF THIS SOFTWARE.
"""
import subprocess, re, os, getopt, sys, yaml, signal, time
-import rpki.resource_set, rpki.sundial, rpki.config, rpki.log, rpki.myrpki
+import rpki.resource_set, rpki.sundial, rpki.config, rpki.log
+import rpki.csv_utils, rpki.x509
# Nasty regular expressions for parsing config files. Sadly, while
# the Python ConfigParser supports writing config files, it does so in
@@ -67,15 +68,11 @@ this_dir = os.getcwd()
test_dir = cleanpath(this_dir, "yamltest.dir")
rpkid_dir = cleanpath(this_dir, "..")
-prog_myrpki = cleanpath(rpkid_dir, "myrpki.py")
-prog_rpkid = cleanpath(rpkid_dir, "rpkid.py")
-prog_irdbd = cleanpath(rpkid_dir, "irdbd.py")
-prog_pubd = cleanpath(rpkid_dir, "pubd.py")
-prog_rootd = cleanpath(rpkid_dir, "rootd.py")
-
-prog_openssl = cleanpath(this_dir, "../../openssl/openssl/apps/openssl")
-if not os.path.exists(prog_openssl):
- prog_openssl = "openssl"
+prog_rpkic = cleanpath(rpkid_dir, "rpkic")
+prog_rpkid = cleanpath(rpkid_dir, "rpkid")
+prog_irdbd = cleanpath(rpkid_dir, "irdbd")
+prog_pubd = cleanpath(rpkid_dir, "pubd")
+prog_rootd = cleanpath(rpkid_dir, "rootd")
class roa_request(object):
"""
@@ -116,14 +113,14 @@ class allocation_db(list):
def __init__(self, yaml):
list.__init__(self)
self.root = allocation(yaml, self)
- assert self.root.is_root()
+ assert self.root.is_root
if self.root.crl_interval is None:
self.root.crl_interval = 24 * 60 * 60
if self.root.regen_margin is None:
self.root.regen_margin = 24 * 60 * 60
for a in self:
if a.sia_base is None:
- if a.runs_pubd():
+ if a.runs_pubd:
base = "rsync://localhost:%d/rpki/" % a.rsync_port
else:
base = a.parent.sia_base
@@ -134,14 +131,13 @@ class allocation_db(list):
a.crl_interval = a.parent.crl_interval
if a.regen_margin is None:
a.regen_margin = a.parent.regen_margin
- a.client_handle = "/".join(a.sia_base.rstrip("/").split("/")[3:])
self.root.closure()
self.map = dict((a.name, a) for a in self)
for a in self:
- if a.is_hosted():
+ if a.is_hosted:
a.hosted_by = self.map[a.hosted_by]
a.hosted_by.hosts.append(a)
- assert not a.is_root() and not a.hosted_by.is_hosted()
+ assert not a.is_root and not a.hosted_by.is_hosted
def dump(self):
"""
@@ -154,12 +150,12 @@ class allocation_db(list):
class allocation(object):
"""
One entity in our allocation database. Every entity in the database
- is assumed to hold resources, so needs at least myrpki services.
+ is assumed to hold resources, so needs at least rpkic services.
Entities that don't have the hosted_by property run their own copies
of rpkid, irdbd, and pubd, so they also need myirbe services.
"""
- base_port = 4400
+ base_port = None
parent = None
crl_interval = None
regen_margin = None
@@ -218,14 +214,14 @@ class allocation(object):
self.base.v6 = self.base.v6.union(r.v6.to_resource_set())
self.hosted_by = yaml.get("hosted_by")
self.hosts = []
- if not self.is_hosted():
+ if not self.is_hosted:
self.engine = self.allocate_engine()
self.rpkid_port = self.allocate_port()
self.irdbd_port = self.allocate_port()
- if self.runs_pubd():
+ if self.runs_pubd:
self.pubd_port = self.allocate_port()
self.rsync_port = self.allocate_port()
- if self.is_root():
+ if self.is_root:
self.rootd_port = self.allocate_port()
def closure(self):
@@ -253,55 +249,56 @@ class allocation(object):
if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids)
if self.parent: s += " Up: %s\n" % self.parent.name
if self.sia_base: s += " SIA: %s\n" % self.sia_base
- if self.is_hosted(): s += " Host: %s\n" % self.hosted_by.name
+ if self.is_hosted: s += " Host: %s\n" % self.hosted_by.name
if self.hosts: s += " Hosts: %s\n" % ", ".join(h.name for h in self.hosts)
for r in self.roa_requests: s += " ROA: %s\n" % r
- if not self.is_hosted(): s += " IPort: %s\n" % self.irdbd_port
- if self.runs_pubd(): s += " PPort: %s\n" % self.pubd_port
- if not self.is_hosted(): s += " RPort: %s\n" % self.rpkid_port
- if self.runs_pubd(): s += " SPort: %s\n" % self.rsync_port
- if self.is_root(): s += " TPort: %s\n" % self.rootd_port
+ if not self.is_hosted: s += " IPort: %s\n" % self.irdbd_port
+ if self.runs_pubd: s += " PPort: %s\n" % self.pubd_port
+ if not self.is_hosted: s += " RPort: %s\n" % self.rpkid_port
+ if self.runs_pubd: s += " SPort: %s\n" % self.rsync_port
+ if self.is_root: s += " TPort: %s\n" % self.rootd_port
return s + " Until: %s\n" % self.resources.valid_until
+ @property
def is_root(self):
"""
Is this the root node?
"""
return self.parent is None
+ @property
def is_hosted(self):
"""
Is this entity hosted?
"""
return self.hosted_by is not None
+ @property
def runs_pubd(self):
"""
Does this entity run a pubd?
"""
- return self.is_root() or not (self.is_hosted() or only_one_pubd)
+ return self.is_root or not (self.is_hosted or only_one_pubd)
def path(self, *names):
"""
Construct pathnames in this entity's test directory.
"""
- return cleanpath(test_dir, self.name, *names)
+ return cleanpath(test_dir, self.host.name, *names)
def csvout(self, fn):
"""
- Open and log a CSV output file. We use delimiter and dialect
- settings imported from the myrpki module, so that we automatically
- write CSV files in the right format.
+ Open and log a CSV output file.
"""
path = self.path(fn)
print "Writing", path
- return rpki.myrpki.csv_writer(path)
+ return rpki.csv_utils.csv_writer(path)
def up_down_url(self):
"""
Construct service URL for this node's parent.
"""
- parent_port = self.parent.hosted_by.rpkid_port if self.parent.is_hosted() else self.parent.rpkid_port
+ parent_port = self.parent.hosted_by.rpkid_port if self.parent.is_hosted else self.parent.rpkid_port
return "http://localhost:%d/up-down/%s/%s" % (parent_port, self.parent.name, self.name)
def dump_asns(self, fn):
@@ -312,37 +309,7 @@ class allocation(object):
for k in self.kids:
f.writerows((k.name, a) for a in k.resources.asn)
f.close()
-
- def dump_children(self, fn):
- """
- Write children CSV file.
- """
- f = self.csvout(fn)
- f.writerows((k.name, k.resources.valid_until, k.path("bpki/resources/ca.cer"))
- for k in self.kids)
- f.close()
-
- def dump_parents(self, fn):
- """
- Write parents CSV file.
- """
- f = self.csvout(fn)
- if self.is_root():
- f.writerow(("rootd",
- "http://localhost:%d/" % self.rootd_port,
- self.path("bpki/servers/ca.cer"),
- self.path("bpki/servers/ca.cer"),
- self.name,
- self.sia_base))
- else:
- parent_host = self.parent.hosted_by if self.parent.is_hosted() else self.parent
- f.writerow((self.parent.name,
- self.up_down_url(),
- self.parent.path("bpki/resources/ca.cer"),
- parent_host.path("bpki/servers/ca.cer"),
- self.name,
- self.sia_base))
- f.close()
+ self.run_rpkic("load_asns", fn)
def dump_prefixes(self, fn):
"""
@@ -352,43 +319,45 @@ class allocation(object):
for k in self.kids:
f.writerows((k.name, p) for p in (k.resources.v4 + k.resources.v6))
f.close()
+ self.run_rpkic("load_prefixes", fn)
def dump_roas(self, fn):
"""
Write ROA CSV file.
"""
- group = self.name if self.is_root() else self.parent.name
+ group = self.name if self.is_root else self.parent.name
f = self.csvout(fn)
for r in self.roa_requests:
f.writerows((p, r.asn, group)
for p in (r.v4 + r.v6 if r.v4 and r.v6 else r.v4 or r.v6 or ()))
f.close()
+ self.run_rpkic("load_roa_requests", fn)
- def dump_clients(self, fn, db):
- """
- Write pubclients CSV file.
- """
- if self.runs_pubd():
- f = self.csvout(fn)
- f.writerows((s.client_handle, s.path("bpki/resources/ca.cer"), s.sia_base)
- for s in (db if only_one_pubd else [self] + self.kids))
- f.close()
-
- def find_pubd(self):
+ @property
+ def pubd(self):
"""
Walk up tree until we find somebody who runs pubd.
"""
s = self
- path = [s]
- while not s.runs_pubd():
+ while not s.runs_pubd:
s = s.parent
- path.append(s)
- return s, ".".join(i.name for i in reversed(path))
+ return s
- def find_host(self):
+ @property
+ def client_handle(self):
"""
- Figure out who hosts this entity.
+ Work out what pubd configure_publication_client will call us.
"""
+ path = []
+ s = self
+ while not s.runs_pubd:
+ path.append(s)
+ s = s.parent
+ path.append(s)
+ return ".".join(i.name for i in reversed(path))
+
+ @property
+ def host(self):
return self.hosted_by or self
def dump_conf(self, fn):
@@ -396,13 +365,10 @@ class allocation(object):
Write configuration file for OpenSSL and RPKI tools.
"""
- s, ignored = self.find_pubd()
-
r = { "handle" : self.name,
- "run_rpkid" : str(not self.is_hosted()),
- "run_pubd" : str(self.runs_pubd()),
- "run_rootd" : str(self.is_root()),
- "openssl" : prog_openssl,
+ "run_rpkid" : str(not self.is_hosted),
+ "run_pubd" : str(self.runs_pubd),
+ "run_rootd" : str(self.is_root),
"irdbd_sql_database" : "irdb%d" % self.engine,
"irdbd_sql_username" : "irdb",
"rpkid_sql_database" : "rpki%d" % self.engine,
@@ -415,8 +381,8 @@ class allocation(object):
"pubd_sql_database" : "pubd%d" % self.engine,
"pubd_sql_username" : "pubd",
"pubd_server_host" : "localhost",
- "pubd_server_port" : str(s.pubd_port),
- "publication_rsync_server" : "localhost:%s" % s.rsync_port }
+ "pubd_server_port" : str(self.pubd.pubd_port),
+ "publication_rsync_server" : "localhost:%s" % self.pubd.rsync_port }
r.update(config_overrides)
@@ -442,7 +408,7 @@ class allocation(object):
Write rsyncd configuration file.
"""
- if self.runs_pubd():
+ if self.runs_pubd:
f = open(self.path(fn), "w")
print "Writing", f.name
f.writelines(s + "\n" for s in
@@ -457,40 +423,26 @@ class allocation(object):
"comment = RPKI test"))
f.close()
- def run_configure_daemons(self):
+ def run_rpkic(self, *args):
"""
- Run configure_daemons if this entity is not hosted by another engine.
+ Run rpkic for this entity.
"""
- if self.is_hosted():
- print "%s is hosted, skipping configure_daemons" % self.name
- else:
- files = [h.path("myrpki.xml") for h in self.hosts]
- self.run_myrpki("configure_daemons", *[f for f in files if os.path.exists(f)])
-
- def run_configure_resources(self):
- """
- Run configure_resources for this entity.
- """
- self.run_myrpki("configure_resources")
-
- def run_myrpki(self, *args):
- """
- Run myrpki.py for this entity.
- """
- print 'Running "%s" for %s' % (" ".join(("myrpki",) + args), self.name)
- subprocess.check_call((sys.executable, prog_myrpki) + args, cwd = self.path())
+ cmd = (prog_rpkic, "-i", self.name, "-c", self.path("rpki.conf")) + args
+ print 'Running "%s"' % " ".join(cmd)
+ subprocess.check_call(cmd, cwd = self.host.path())
def run_python_daemon(self, prog):
"""
Start a Python daemon and return a subprocess.Popen object
representing the running daemon.
"""
- basename = os.path.basename(prog)
- p = subprocess.Popen((sys.executable, prog, "-d", "-c", self.path("rpki.conf")),
+ cmd = (prog, "-d", "-c", self.path("rpki.conf"))
+ log = os.path.splitext(os.path.basename(prog))[0] + ".log"
+ p = subprocess.Popen(cmd,
cwd = self.path(),
- stdout = open(self.path(os.path.splitext(basename)[0] + ".log"), "w"),
+ stdout = open(self.path(log), "w"),
stderr = subprocess.STDOUT)
- print "Running %s for %s: pid %d process %r" % (basename, self.name, p.pid, p)
+ print 'Running %s for %s: pid %d process %r' % (" ".join(cmd), self.name, p.pid, p)
return p
def run_rpkid(self):
@@ -526,18 +478,6 @@ class allocation(object):
print "Running rsyncd for %s: pid %d process %r" % (self.name, p.pid, p)
return p
- def run_openssl(self, *args, **kwargs):
- """
- Run OpenSSL
- """
- env = { "PATH" : os.environ["PATH"],
- "BPKI_DIRECTORY" : self.path("bpki/servers"),
- "OPENSSL_CONF" : "/dev/null",
- "RANDFILE" : ".OpenSSL.whines.unless.I.set.this" }
- env.update(kwargs)
- subprocess.check_call((prog_openssl,) + args, cwd = self.path(), env = env)
-
-
os.environ["TZ"] = "UTC"
time.tzset()
@@ -580,7 +520,7 @@ try:
cfg = rpki.config.parser(cfg_file, "yamltest", allow_missing = True)
only_one_pubd = cfg.getboolean("only_one_pubd", True)
- prog_openssl = cfg.get("openssl", prog_openssl)
+ allocation.base_port = cfg.getint("base_port", 4400)
config_overrides = dict(
(k, cfg.get(k))
@@ -604,45 +544,61 @@ try:
# Show what we loaded
- db.dump()
+ #db.dump()
# Set up each entity in our test
for d in db:
- os.makedirs(d.path())
- d.dump_asns("asns.csv")
- d.dump_prefixes("prefixes.csv")
- d.dump_roas("roas.csv")
- d.dump_conf("rpki.conf")
- d.dump_rsyncd("rsyncd.conf")
- if False:
- d.dump_children("children.csv")
- d.dump_parents("parents.csv")
- d.dump_clients("pubclients.csv", db)
+ if not d.is_hosted:
+ os.makedirs(d.path())
+ os.makedirs(d.path("bpki/resources"))
+ os.makedirs(d.path("bpki/servers"))
+ d.dump_conf("rpki.conf")
+ if d.runs_pubd:
+ d.dump_rsyncd("rsyncd.conf")
# Initialize BPKI and generate self-descriptor for each entity.
for d in db:
- d.run_myrpki("initialize")
+ d.run_rpkic("initialize")
# Create publication directories.
for d in db:
- if d.is_root() or d.runs_pubd():
+ if d.is_root or d.runs_pubd:
os.makedirs(d.path("publication"))
# Create RPKI root certificate.
print "Creating rootd RPKI root certificate"
- # Should use req -subj here to set subject name. Later.
- db.root.run_openssl("x509", "-req", "-sha256", "-outform", "DER",
- "-signkey", "bpki/servers/ca.key",
- "-in", "bpki/servers/ca.req",
- "-out", "publication/root.cer",
- "-extfile", "rpki.conf",
- "-extensions", "rootd_x509_extensions")
+ root_resources = rpki.resource_set.resource_bag(
+ asn = rpki.resource_set.resource_set_as("0-4294967295"),
+ v4 = rpki.resource_set.resource_set_ipv4("0.0.0.0/0"),
+ v6 = rpki.resource_set.resource_set_ipv6("::/0"))
+
+ root_key = rpki.x509.RSA.generate()
+
+ root_uri = "rsync://localhost:%d/rpki/" % db.root.pubd.rsync_port
+
+ root_sia = ((rpki.oids.name2oid["id-ad-caRepository"], ("uri", root_uri)),
+ (rpki.oids.name2oid["id-ad-rpkiManifest"], ("uri", root_uri + "root.mnf")))
+
+ root_cert = rpki.x509.X509.self_certify(
+ keypair = root_key,
+ subject_key = root_key.get_RSApublic(),
+ serial = 1,
+ sia = root_sia,
+ notAfter = rpki.sundial.now() + rpki.sundial.timedelta(days = 365),
+ resources = root_resources)
+ f = open(db.root.path("publication/root.cer"), "wb")
+ f.write(root_cert.get_DER())
+ f.close()
+
+ f = open(db.root.path("bpki/servers/root.key"), "wb")
+ f.write(root_key.get_DER())
+ f.close()
# From here on we need to pay attention to initialization order. We
# used to do all the pre-configure_daemons stuff before running any
@@ -659,62 +615,62 @@ try:
print
print "Configuring", d.name
print
- if d.is_root():
- d.run_myrpki("configure_publication_client", d.path("entitydb", "repositories", "%s.xml" % d.name))
+ if d.is_root:
+ assert not d.is_hosted
+ d.run_rpkic("configure_publication_client",
+ d.path("%s.%s.repository-request.xml" % (d.name, d.name)))
print
- d.run_myrpki("configure_repository", d.path("entitydb", "pubclients", "%s.xml" % d.name))
+ d.run_rpkic("configure_repository",
+ d.path("%s.repository-response.xml" % d.client_handle))
print
else:
- d.parent.run_myrpki("configure_child", d.path("entitydb", "identity.xml"))
+ d.parent.run_rpkic("configure_child", d.path("%s.identity.xml" % d.name))
print
- d.run_myrpki("configure_parent", d.parent.path("entitydb", "children", "%s.xml" % d.name))
+ d.run_rpkic("configure_parent",
+ d.parent.path("%s.%s.parent-response.xml" % (d.parent.name, d.name)))
print
- publisher, path = d.find_pubd()
- publisher.run_myrpki("configure_publication_client", d.path("entitydb", "repositories", "%s.xml" % d.parent.name))
+ d.pubd.run_rpkic("configure_publication_client",
+ d.path("%s.%s.repository-request.xml" % (d.name, d.parent.name)))
print
- d.run_myrpki("configure_repository", publisher.path("entitydb", "pubclients", "%s.xml" % path))
+ d.run_rpkic("configure_repository",
+ d.pubd.path("%s.repository-response.xml" % d.client_handle))
print
- parent_host = d.parent.find_host()
- if d.parent is not parent_host:
- d.parent.run_configure_resources()
- print
- parent_host.run_configure_daemons()
+ d.parent.run_rpkic("synchronize")
print
- if publisher is not parent_host:
- publisher.run_configure_daemons()
+ if d.pubd is not d.parent.host:
+ d.pubd.run_rpkic("synchronize")
print
print "Running daemons for", d.name
- if d.is_root():
+ if d.is_root:
progs.append(d.run_rootd())
- if not d.is_hosted():
+ if not d.is_hosted:
progs.append(d.run_irdbd())
progs.append(d.run_rpkid())
- if d.runs_pubd():
+ if d.runs_pubd:
progs.append(d.run_pubd())
progs.append(d.run_rsyncd())
- if d.is_root() or not d.is_hosted() or d.runs_pubd():
+ if d.is_root or not d.is_hosted or d.runs_pubd:
print "Giving", d.name, "daemons time to start up"
time.sleep(20)
print
assert all(p.poll() is None for p in progs)
- # Run configure_daemons to set up IRDB and RPKI objects. Need to
- # run a second time to push BSC certs out to rpkid. Nothing
- # should happen on the third pass. Oops, when hosting we need to
- # run configure_resources between passes, since only the hosted
- # entity can issue the BSC, etc.
+ # In theory we now only need to synchronize the new entity once.
+ d.run_rpkic("synchronize")
+ # Run through list again, to be sure we catch hosted cases.
+ # In theory this is no longer necessary.
+ if False:
for i in xrange(3):
- d.run_configure_resources()
- d.find_host().run_configure_daemons()
+ for d in db:
+ d.run_rpkic("synchronize")
- # Run through list again, to be sure we catch hosted cases
-
- for i in xrange(3):
- for d in db:
- d.run_configure_resources()
- d.run_configure_daemons()
+ # Load all the CSV files
+ for d in db:
+ d.dump_asns("%s.asns.csv" % d.name)
+ d.dump_prefixes("%s.prefixes.csv" % d.name)
+ d.dump_roas("%s.roas.csv" % d.name)
print "Done initializing daemons"