Diffstat (limited to 'ca/tests')
-rw-r--r--   ca/tests/Makefile.in                                 |   42
-rwxr-xr-x   ca/tests/bgpsec-yaml.py                              |   52
-rw-r--r--   ca/tests/left-right-protocol-samples.xml             |  170
-rw-r--r--   ca/tests/myrpki-xml-parse-test.py                    |   74
-rw-r--r--   ca/tests/old_irdbd.py                                |    4
-rw-r--r--   ca/tests/publication-control-protocol-samples.xml    |  155
-rw-r--r--   ca/tests/publication-protocol-samples.xml            |  451
-rw-r--r--   ca/tests/rrdp-samples.xml                            |   81
-rw-r--r--   ca/tests/smoketest.6.yaml                            |    3
-rw-r--r--   ca/tests/smoketest.py                                | 2316
-rw-r--r--   ca/tests/sql-cleaner.py                              |   44
-rw-r--r--   ca/tests/sql-dumper.py                               |   22
-rwxr-xr-x   ca/tests/test-rrdp.py                                |  123
-rw-r--r--   ca/tests/testpoke.py                                 |  118
-rw-r--r--   ca/tests/xml-parse-test.py                           |  136
-rw-r--r--   ca/tests/yamlconf.py                                 | 1423
-rw-r--r--   ca/tests/yamltest-test-all.sh                        |   26
-rwxr-xr-x[-rw-r--r--]   ca/tests/yamltest.py                     | 1535
18 files changed, 3520 insertions, 3255 deletions
diff --git a/ca/tests/Makefile.in b/ca/tests/Makefile.in
index 9796dd2b..eb4357b1 100644
--- a/ca/tests/Makefile.in
+++ b/ca/tests/Makefile.in
@@ -3,12 +3,11 @@
PYTHON = @PYTHON@
abs_top_builddir = @abs_top_builddir@
-all: protocol-samples
+all:
+ @true
clean:
- rm -rf smoketest.dir left-right-protocol-samples publication-protocol-samples yamltest.dir rcynic.xml rcynic-data
-
-protocol-samples: left-right-protocol-samples/.stamp publication-protocol-samples/.stamp
+ rm -rf smoketest.dir left-right-protocol-samples publication-protocol-samples publication-control-protocol-samples rrdp-samples yamltest.dir rcynic.xml rcynic-data
left-right-protocol-samples/.stamp: left-right-protocol-samples.xml split-protocol-samples.xsl
rm -rf left-right-protocol-samples
@@ -16,20 +15,44 @@ left-right-protocol-samples/.stamp: left-right-protocol-samples.xml split-protoc
xsltproc --param verbose 0 --stringparam dir left-right-protocol-samples split-protocol-samples.xsl left-right-protocol-samples.xml
touch $@
+left-right-relaxng: left-right-protocol-samples/.stamp
+ xmllint --noout --relaxng ../../schemas/relaxng/left-right.rng left-right-protocol-samples/*.xml
+
publication-protocol-samples/.stamp: publication-protocol-samples.xml split-protocol-samples.xsl
rm -rf publication-protocol-samples
mkdir publication-protocol-samples
xsltproc --param verbose 0 --stringparam dir publication-protocol-samples split-protocol-samples.xsl publication-protocol-samples.xml
touch $@
-relaxng: protocol-samples
- xmllint --noout --relaxng ../../schemas/relaxng/left-right-schema.rng left-right-protocol-samples/*.xml
- xmllint --noout --relaxng ../../schemas/relaxng/up-down-schema.rng up-down-protocol-samples/*.xml
- xmllint --noout --relaxng ../../schemas/relaxng/publication-schema.rng publication-protocol-samples/*.xml
+publication-relaxng: publication-protocol-samples/.stamp
+ xmllint --noout --relaxng ../../schemas/relaxng/publication.rng publication-protocol-samples/*.xml
+
+publication-control-protocol-samples/.stamp: publication-control-protocol-samples.xml split-protocol-samples.xsl
+ rm -rf publication-control-protocol-samples
+ mkdir publication-control-protocol-samples
+ xsltproc --param verbose 0 --stringparam dir publication-control-protocol-samples split-protocol-samples.xsl publication-control-protocol-samples.xml
+ touch $@
+
+publication-control-relaxng: publication-control-protocol-samples/.stamp
+ xmllint --noout --relaxng ../../schemas/relaxng/publication-control.rng publication-control-protocol-samples/*.xml
+
+rrdp-samples/.stamp: rrdp-samples.xml split-protocol-samples.xsl
+ rm -rf rrdp-samples
+ mkdir rrdp-samples
+ xsltproc --param verbose 0 --stringparam dir rrdp-samples split-protocol-samples.xsl rrdp-samples.xml
+ touch $@
+
+rrdp-relaxng: rrdp-samples/.stamp
+ xmllint --noout --relaxng ../../schemas/relaxng/rrdp.rng rrdp-samples/*.xml
+
+up-down-relaxng:
+ xmllint --noout --relaxng ../../schemas/relaxng/up-down.rng up-down-protocol-samples/*.xml
+
+relaxng: up-down-relaxng left-right-relaxng publication-relaxng publication-control-relaxng rrdp-relaxng
all-tests:: relaxng
-parse-test: protocol-samples
+parse-test: left-right-protocol-samples publication-protocol-samples publication-control-protocol-samples
${PYTHON} xml-parse-test.py
all-tests:: parse-test
@@ -67,7 +90,6 @@ YAMLTEST_CONFIG = smoketest.1.yaml
yamltest:
rm -rf yamltest.dir rcynic-data
- ${PYTHON} sql-cleaner.py
${PYTHON} yamltest.py ${YAMLTEST_CONFIG}
YAMLCONF_CONFIG = ${YAMLTEST_CONFIG}
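
The Makefile change above replaces the single protocol-samples/relaxng pair with one split-and-validate target per schema. A minimal usage sketch, assuming the RelaxNG schemas exist under ../../schemas/relaxng/ and xsltproc and xmllint are installed:

    cd ca/tests
    make left-right-relaxng            # split left-right-protocol-samples.xml and validate against left-right.rng
    make publication-control-relaxng   # likewise for the new publication-control samples
    make relaxng                       # run all five per-schema validation targets (also pulled in by all-tests)
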
diff --git a/ca/tests/bgpsec-yaml.py b/ca/tests/bgpsec-yaml.py
index 1562f86e..500d2b9d 100755
--- a/ca/tests/bgpsec-yaml.py
+++ b/ca/tests/bgpsec-yaml.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
#
# $Id$
-#
+#
# Copyright (C) 2014 Dragon Research Labs ("DRL")
#
# Permission to use, copy, modify, and distribute this software for any
@@ -30,28 +30,28 @@ root = "Root"
class Kid(object):
- def __init__(self, n):
- self.name = "ISP-%03d" % n
- self.ipv4 = "10.%d.0.0/16" % n
- self.asn = n
- self.router_id = n * 10000
+ def __init__(self, i):
+ self.name = "ISP-%03d" % i
+ self.ipv4 = "10.%d.0.0/16" % i
+ self.asn = i
+ self.router_id = i * 10000
- @property
- def declare(self):
- return dict(name = self.name,
- ipv4 = self.ipv4,
- asn = self.asn,
- hosted_by = root,
- roa_request = [dict(asn = self.asn, ipv4 = self.ipv4)],
- router_cert = [dict(asn = self.asn, router_id = self.router_id)])
+ @property
+ def declare(self):
+ return dict(name = self.name,
+ ipv4 = self.ipv4,
+ asn = self.asn,
+ hosted_by = root,
+ roa_request = [dict(asn = self.asn, ipv4 = self.ipv4)],
+ router_cert = [dict(asn = self.asn, router_id = self.router_id)])
- @property
- def del_routercert(self):
- return dict(name = self.name, router_cert_del = [dict(asn = self.asn, router_id = self.router_id)])
+ @property
+ def del_routercert(self):
+ return dict(name = self.name, router_cert_del = [dict(asn = self.asn, router_id = self.router_id)])
- @property
- def add_routercert(self):
- return dict(name = self.name, router_cert_add = [dict(asn = self.asn, router_id = self.router_id)])
+ @property
+ def add_routercert(self):
+ return dict(name = self.name, router_cert_add = [dict(asn = self.asn, router_id = self.router_id)])
kids = [Kid(n + 1) for n in xrange(200)]
@@ -72,14 +72,14 @@ docs.append([shell_first,
gym = kids[50:70]
for kid in gym:
- docs.append([shell_next,
- kid.del_routercert,
- sleeper])
+ docs.append([shell_next,
+ kid.del_routercert,
+ sleeper])
for kid in gym:
- docs.append([shell_next,
- kid.add_routercert,
- sleeper])
+ docs.append([shell_next,
+ kid.add_routercert,
+ sleeper])
print '''\
# This configuration was generated by a script. Edit at your own risk.
diff --git a/ca/tests/left-right-protocol-samples.xml b/ca/tests/left-right-protocol-samples.xml
index 7b97386d..9729c68c 100644
--- a/ca/tests/left-right-protocol-samples.xml
+++ b/ca/tests/left-right-protocol-samples.xml
@@ -37,15 +37,15 @@
<completely_gratuitous_wrapper_element_to_let_me_run_this_through_xmllint>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <self action="create" tag="a000" self_handle="42"/>
+ <tenant action="create" tag="a000" tenant_handle="42"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <self action="create" tag="a000" self_handle="42"/>
+ <tenant action="create" tag="a000" tenant_handle="42"/>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <self action="set" self_handle="42"
+ <tenant action="set" tenant_handle="42"
rekey="yes"
reissue="yes"
revoke="yes"
@@ -92,19 +92,19 @@
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
</bpki_glue>
- </self>
+ </tenant>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <self action="set" self_handle="42"/>
+ <tenant action="set" tenant_handle="42"/>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <self action="get" self_handle="42"/>
+ <tenant action="get" tenant_handle="42"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <self action="get" self_handle="42">
+ <tenant action="get" tenant_handle="42">
<bpki_cert>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
@@ -143,15 +143,15 @@
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
</bpki_glue>
- </self>
+ </tenant>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <self action="list"/>
+ <tenant action="list"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <self action="list" self_handle="42">
+ <tenant action="list" tenant_handle="42">
<bpki_cert>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
@@ -190,22 +190,22 @@
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
</bpki_glue>
- </self>
- <self action="list" self_handle="99"/>
+ </tenant>
+ <tenant action="list" tenant_handle="99"/>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <self action="destroy" self_handle="42"/>
+ <tenant action="destroy" tenant_handle="42"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <self action="destroy" self_handle="42"/>
+ <tenant action="destroy" tenant_handle="42"/>
</msg>
<!-- ==== -->
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <bsc action="create" self_handle="42" bsc_handle="17"
+ <bsc action="create" tenant_handle="42" bsc_handle="17"
generate_keypair="yes"
key_type="rsa"
hash_alg="sha256"
@@ -233,13 +233,13 @@
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <bsc action="create" self_handle="42" bsc_handle="17">
+ <bsc action="create" tenant_handle="42" bsc_handle="17">
<pkcs10_request>cmVxdWVzdAo=</pkcs10_request>
</bsc>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <bsc action="set" self_handle="42" bsc_handle="17">
+ <bsc action="set" tenant_handle="42" bsc_handle="17">
<signing_cert>
MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV
BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda
@@ -274,15 +274,15 @@
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <bsc action="set" self_handle="42" bsc_handle="17"/>
+ <bsc action="set" tenant_handle="42" bsc_handle="17"/>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <bsc action="get" self_handle="42" bsc_handle="17"/>
+ <bsc action="get" tenant_handle="42" bsc_handle="17"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <bsc action="get" self_handle="42" bsc_handle="17">
+ <bsc action="get" tenant_handle="42" bsc_handle="17">
<signing_cert>
MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV
BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda
@@ -306,11 +306,11 @@
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <bsc action="list" self_handle="42"/>
+ <bsc action="list" tenant_handle="42"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <bsc action="get" self_handle="42" bsc_handle="17">
+ <bsc action="get" tenant_handle="42" bsc_handle="17">
<signing_cert>
MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV
BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda
@@ -334,24 +334,24 @@
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <bsc action="destroy" self_handle="42" bsc_handle="17"/>
+ <bsc action="destroy" tenant_handle="42" bsc_handle="17"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <bsc action="destroy" self_handle="42" bsc_handle="17"/>
+ <bsc action="destroy" tenant_handle="42" bsc_handle="17"/>
</msg>
<!-- ==== -->
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <parent action="create" self_handle="42" parent_handle="666"
+ <parent action="create" tenant_handle="42" parent_handle="666"
peer_contact_uri="https://re.bar.example/bandicoot/"
sia_base="rsync://repo.foo.example/wombat/"
bsc_handle="17"
repository_handle="120"
sender_name="tweedledee"
recipient_name="tweedledum">
- <bpki_cms_cert>
+ <bpki_cert>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
@@ -369,8 +369,8 @@
sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cms_cert>
- <bpki_cms_glue>
+ </bpki_cert>
+ <bpki_glue>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
@@ -388,16 +388,16 @@
sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cms_glue>
+ </bpki_glue>
</parent>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <parent action="create" self_handle="42" parent_handle="666"/>
+ <parent action="create" tenant_handle="42" parent_handle="666"/>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <parent action="set" self_handle="42" parent_handle="666"
+ <parent action="set" tenant_handle="42" parent_handle="666"
peer_contact_uri="https://re.bar.example/bandicoot/"
sia_base="rsync://repo.foo.example/wombat/"
bsc_handle="17"
@@ -405,7 +405,7 @@
rekey="yes"
reissue="yes"
revoke="yes">
- <bpki_cms_cert>
+ <bpki_cert>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
@@ -423,8 +423,8 @@
sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cms_cert>
- <bpki_cms_glue>
+ </bpki_cert>
+ <bpki_glue>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
@@ -442,25 +442,25 @@
sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cms_glue>
+ </bpki_glue>
</parent>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <parent action="set" self_handle="42" parent_handle="666"/>
+ <parent action="set" tenant_handle="42" parent_handle="666"/>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <parent action="get" self_handle="42" parent_handle="666"/>
+ <parent action="get" tenant_handle="42" parent_handle="666"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <parent action="get" self_handle="42" parent_handle="666"
+ <parent action="get" tenant_handle="42" parent_handle="666"
peer_contact_uri="https://re.bar.example/bandicoot/"
sia_base="rsync://repo.foo.example/wombat/"
bsc_handle="17"
repository_handle="120">
- <bpki_cms_cert>
+ <bpki_cert>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
@@ -478,8 +478,8 @@
sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cms_cert>
- <bpki_cms_glue>
+ </bpki_cert>
+ <bpki_glue>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
@@ -497,21 +497,21 @@
sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cms_glue>
+ </bpki_glue>
</parent>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <parent action="list" self_handle="42"/>
+ <parent action="list" tenant_handle="42"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <parent action="list" self_handle="42" parent_handle="666"
+ <parent action="list" tenant_handle="42" parent_handle="666"
peer_contact_uri="https://re.bar.example/bandicoot/"
sia_base="rsync://repo.foo.example/wombat/"
bsc_handle="17"
repository_handle="120">
- <bpki_cms_cert>
+ <bpki_cert>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
@@ -529,8 +529,8 @@
sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cms_cert>
- <bpki_cms_glue>
+ </bpki_cert>
+ <bpki_glue>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
@@ -548,22 +548,22 @@
sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cms_glue>
+ </bpki_glue>
</parent>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <parent action="destroy" self_handle="42"
+ <parent action="destroy" tenant_handle="42"
parent_handle="666"/> </msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <parent action="destroy" self_handle="42" parent_handle="666"/>
+ <parent action="destroy" tenant_handle="42" parent_handle="666"/>
</msg>
<!-- ==== -->
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <child action="create" self_handle="42" child_handle="3"
+ <child action="create" tenant_handle="42" child_handle="3"
bsc_handle="17">
<bpki_cert>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
@@ -588,11 +588,11 @@
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <child action="create" self_handle="42" child_handle="3"/>
+ <child action="create" tenant_handle="42" child_handle="3"/>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <child action="set" self_handle="42" child_handle="3"
+ <child action="set" tenant_handle="42" child_handle="3"
bsc_handle="17"
reissue="yes">
<bpki_cert>
@@ -618,15 +618,15 @@
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <child action="set" self_handle="42" child_handle="3"/>
+ <child action="set" tenant_handle="42" child_handle="3"/>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <child action="get" self_handle="42" child_handle="3"/>
+ <child action="get" tenant_handle="42" child_handle="3"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <child action="get" self_handle="42" child_handle="3"
+ <child action="get" tenant_handle="42" child_handle="3"
bsc_handle="17">
<bpki_cert>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
@@ -651,11 +651,11 @@
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <child action="list" self_handle="42"/>
+ <child action="list" tenant_handle="42"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <child action="list" self_handle="42" child_handle="3"
+ <child action="list" tenant_handle="42" child_handle="3"
bsc_handle="17">
<bpki_cert>
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
@@ -680,17 +680,17 @@
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <child action="destroy" self_handle="42" child_handle="3"/>
+ <child action="destroy" tenant_handle="42" child_handle="3"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <child action="destroy" self_handle="42" child_handle="3"/>
+ <child action="destroy" tenant_handle="42" child_handle="3"/>
</msg>
<!-- ==== -->
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <repository action="create" self_handle="42" repository_handle="120"
+ <repository action="create" tenant_handle="42" repository_handle="120"
peer_contact_uri="https://re.bar.example/bandicoot/"
bsc_handle="17">
<bpki_cert>
@@ -735,11 +735,11 @@
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <repository action="create" self_handle="42" repository_handle="120"/>
+ <repository action="create" tenant_handle="42" repository_handle="120"/>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <repository action="set" self_handle="42" repository_handle="120"
+ <repository action="set" tenant_handle="42" repository_handle="120"
peer_contact_uri="https://re.bar.example/bandicoot/"
bsc_handle="17">
<bpki_cert>
@@ -784,15 +784,15 @@
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <repository action="set" self_handle="42" repository_handle="120"/>
+ <repository action="set" tenant_handle="42" repository_handle="120"/>
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <repository action="get" self_handle="42" repository_handle="120"/>
+ <repository action="get" tenant_handle="42" repository_handle="120"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <repository action="get" self_handle="42" repository_handle="120"
+ <repository action="get" tenant_handle="42" repository_handle="120"
peer_contact_uri="https://re.bar.example/bandicoot/"
bsc_handle="17">
<bpki_cert>
@@ -837,11 +837,11 @@
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <repository action="list" self_handle="42"/>
+ <repository action="list" tenant_handle="42"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <repository action="list" self_handle="42" repository_handle="120"
+ <repository action="list" tenant_handle="42" repository_handle="120"
peer_contact_uri="https://re.bar.example/bandicoot/"
bsc_handle="17">
<bpki_cert>
@@ -886,21 +886,21 @@
</msg>
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <repository action="destroy" self_handle="42" repository_handle="120"/>
+ <repository action="destroy" tenant_handle="42" repository_handle="120"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <repository action="destroy" self_handle="42" repository_handle="120"/>
+ <repository action="destroy" tenant_handle="42" repository_handle="120"/>
</msg>
<!-- ==== -->
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <list_resources self_handle="42" child_handle="289"/>
+ <list_resources tenant_handle="42" child_handle="289"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <list_resources self_handle="42" child_handle="289"
+ <list_resources tenant_handle="42" child_handle="289"
valid_until="2008-04-01T00:00:00Z"
ipv4="10.0.0.44/32,10.3.0.44/32"
ipv6="fe80:deed:f00d::/48,fe80:dead:beef:2::-fe80:dead:beef:2::49"
@@ -910,16 +910,16 @@
<!-- === -->
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <list_roa_requests self_handle="42"/>
+ <list_roa_requests tenant_handle="42"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <list_roa_requests self_handle="42"
+ <list_roa_requests tenant_handle="42"
asn="666"
ipv4="10.0.0.44/32,10.3.0.44/32"
ipv6="fe80:deed:f00d::/48,fe80:dead:beef::/48-56"
/>
- <list_roa_requests self_handle="42"
+ <list_roa_requests tenant_handle="42"
asn="12345"
ipv4="10.0.0.44/32"
ipv6="2002:a00::/48-56"
@@ -929,11 +929,11 @@
<!-- === -->
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <list_received_resources self_handle="42"/>
+ <list_received_resources tenant_handle="42"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <list_received_resources self_handle="42"
+ <list_received_resources tenant_handle="42"
parent_handle="Alice"
notBefore="2010-02-22T03:44:23Z"
notAfter="2011-02-21T11:03:49Z"
@@ -942,7 +942,7 @@
aia_uri="rsync://arin.rpki.net/arin/arin.cer"
asn="1280,3557"
ipv4="149.20.0.0/16,192.5.4.0/23,204.152.184.0/21"/>
- <list_received_resources self_handle="42"
+ <list_received_resources tenant_handle="42"
parent_handle="Bob"
uri="rsync://arin.rpki.net/arin/1/uWqpa8GkcEDBZkEsmOEofeDKk9s.cer"
notBefore="2010-02-22T03:44:20Z"
@@ -958,11 +958,11 @@
<!-- === -->
<msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <list_published_objects self_handle="42"/>
+ <list_published_objects tenant_handle="42"/>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <list_published_objects self_handle="42" uri="rsync://rpki.example.org/rpki/DEMEtlxZrZes7TNGbe7XwVSMgW0.crl">
+ <list_published_objects tenant_handle="42" uri="rsync://rpki.example.org/rpki/DEMEtlxZrZes7TNGbe7XwVSMgW0.crl">
MIIBrjCBlwIBATANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEygwQzQzMDRCNjVDNTlBRDk3
QUNFRDMzNDY2REVFRDdDMTU0OEM4MTZEFw0wOTA5MjgyMDUxNDlaFw0wOTA5MjgyMTUxNDla
oDAwLjAfBgNVHSMEGDAWgBQMQwS2XFmtl6ztM0Zt7tfBVIyBbTALBgNVHRQEBAICAWkwDQYJ
@@ -973,7 +973,7 @@
fBk4i7H945v/zs7bLLMJxTs8+ao4iCDuknjbGhjWmi9xrTXDtcCXx607rPDkJQcJE2WnRS/U
HIA=
</list_published_objects>
- <list_published_objects self_handle="42" uri="rsync://rpki.example.org/rpki/DEMEtlxZrZes7TNGbe7XwVSMgW0.mft">
+ <list_published_objects tenant_handle="42" uri="rsync://rpki.example.org/rpki/DEMEtlxZrZes7TNGbe7XwVSMgW0.mft">
MIIHBQYJKoZIhvcNAQcCoIIG9jCCBvICAQMxDTALBglghkgBZQMEAgEwggEfBgsqhkiG9w0B
CRABGqCCAQ4EggEKMIIBBgICAWoYDzIwMDkwOTI4MjA1MTQ5WhgPMjAwOTA5MjgyMTUxNDla
BglghkgBZQMEAgEwgdIwRBYfREVNRXRseFpyWmVzN1ROR2JlN1h3VlNNZ1cwLmNybAMhAPgd
@@ -1009,7 +1009,7 @@
yML8lQJAFAyjnXJ+doGbqfTUpVH4q4drqRb73WbL0zf/Z2HGwhDlTmsAdjparWdQcfXIVrJF
ynS1fab9XZfj+VtBFKjooDjaLw==
</list_published_objects>
- <list_published_objects self_handle="42" uri="rsync://rpki.example.org/rpki/ldvxcHGdr3oKHcPj-gukmetNRZ0.roa">
+ <list_published_objects tenant_handle="42" uri="rsync://rpki.example.org/rpki/ldvxcHGdr3oKHcPj-gukmetNRZ0.roa">
MIIGnQYJKoZIhvcNAQcCoIIGjjCCBooCAQMxDTALBglghkgBZQMEAgEwMQYLKoZIhvcNAQkQ
ARigIgQgMB4CAg3lMBgwFgQCAAEwEDAGAwQAwAUEMAYDBADABQWgggSTMIIEjzCCA3egAwIB
AgIBAjANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEygwQzQzMDRCNjVDNTlBRDk3QUNFRDMz
@@ -1043,7 +1043,7 @@
+N931gu2r5I/XB/MGgGvXNWozK7RuMn55i5hMqI2NQs+/b7/AQU0+/i3g7SlLA8iZwHq49U2
ZXRCjLXcy0tQOWVsMnGfReN8oNDhHbc=
</list_published_objects>
- <list_published_objects self_handle="42" uri="rsync://rpki.example.org/rpki/xopNGcsB_p7eafYqXatmVV8HZd0.roa">
+ <list_published_objects tenant_handle="42" uri="rsync://rpki.example.org/rpki/xopNGcsB_p7eafYqXatmVV8HZd0.roa">
MIIGoQYJKoZIhvcNAQcCoIIGkjCCBo4CAQMxDTALBglghkgBZQMEAgEwMAYLKoZIhvcNAQkQ
ARigIQQfMB0CAgUAMBcwFQQCAAEwDzAFAwMAlRQwBgMEA8yYuKCCBJgwggSUMIIDfKADAgEC
AgEDMA0GCSqGSIb3DQEBCwUAMDMxMTAvBgNVBAMTKDBDNDMwNEI2NUM1OUFEOTdBQ0VEMzM0
@@ -1083,11 +1083,11 @@
<!-- === -->
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <report_error self_handle="42" error_code="your_hair_is_on_fire">text string</report_error>
+ <report_error tenant_handle="42" error_code="your_hair_is_on_fire">text string</report_error>
</msg>
<msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/left-right-spec/">
- <report_error self_handle="42" error_code="your_hair_is_on_fire"/>
+ <report_error tenant_handle="42" error_code="your_hair_is_on_fire"/>
</msg>
</completely_gratuitous_wrapper_element_to_let_me_run_this_through_xmllint>
diff --git a/ca/tests/myrpki-xml-parse-test.py b/ca/tests/myrpki-xml-parse-test.py
index 9db7ec57..d915ea5b 100644
--- a/ca/tests/myrpki-xml-parse-test.py
+++ b/ca/tests/myrpki-xml-parse-test.py
@@ -25,77 +25,77 @@ relaxng = lxml.etree.RelaxNG(file = "myrpki.rng")
tree = lxml.etree.parse("myrpki.xml").getroot()
if False:
- print lxml.etree.tostring(tree, pretty_print = True, encoding = "us-ascii", xml_declaration = True)
+ print lxml.etree.tostring(tree, pretty_print = True, encoding = "us-ascii", xml_declaration = True)
relaxng.assertValid(tree)
def showitems(y):
- if False:
- for k, v in y.items():
- if v:
- print " ", k, v
+ if False:
+ for k, v in y.items():
+ if v:
+ print " ", k, v
def tag(t):
- return "{http://www.hactrn.net/uris/rpki/myrpki/}" + t
+ return "{http://www.hactrn.net/uris/rpki/myrpki/}" + t
print "My handle:", tree.get("handle")
print "Children:"
for x in tree.getiterator(tag("child")):
- print " ", x
- print " Handle:", x.get("handle")
- print " ASNS: ", rpki.resource_set.resource_set_as(x.get("asns"))
- print " IPv4: ", rpki.resource_set.resource_set_ipv4(x.get("v4"))
- print " Valid: ", x.get("valid_until")
- showitems(x)
+ print " ", x
+ print " Handle:", x.get("handle")
+ print " ASNS: ", rpki.resource_set.resource_set_as(x.get("asns"))
+ print " IPv4: ", rpki.resource_set.resource_set_ipv4(x.get("v4"))
+ print " Valid: ", x.get("valid_until")
+ showitems(x)
print
print "ROA requests:"
for x in tree.getiterator(tag("roa_request")):
- print " ", x
- print " ASN: ", x.get("asn")
- print " IPv4:", rpki.resource_set.roa_prefix_set_ipv4(x.get("v4"))
- print " IPv6:", rpki.resource_set.roa_prefix_set_ipv6(x.get("v6"))
- showitems(x)
+ print " ", x
+ print " ASN: ", x.get("asn")
+ print " IPv4:", rpki.resource_set.roa_prefix_set_ipv4(x.get("v4"))
+ print " IPv6:", rpki.resource_set.roa_prefix_set_ipv6(x.get("v6"))
+ showitems(x)
print
def showpem(label, b64, kind):
- cmd = ("openssl", kind, "-noout", "-text", "-inform", "DER")
- if kind == "x509":
- cmd += ("-certopt", "no_pubkey,no_sigdump")
- p = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE)
- text = p.communicate(input = base64.b64decode(b64))[0]
- if p.returncode != 0:
- raise subprocess.CalledProcessError(returncode = p.returncode, cmd = cmd)
- print label, text
+ cmd = ("openssl", kind, "-noout", "-text", "-inform", "DER")
+ if kind == "x509":
+ cmd += ("-certopt", "no_pubkey,no_sigdump")
+ p = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE)
+ text = p.communicate(input = base64.b64decode(b64))[0]
+ if p.returncode != 0:
+ raise subprocess.CalledProcessError(returncode = p.returncode, cmd = cmd)
+ print label, text
for x in tree.getiterator(tag("child")):
- cert = x.findtext(tag("bpki_certificate"))
- if cert:
- showpem("Child", cert, "x509")
+ cert = x.findtext(tag("bpki_certificate"))
+ if cert:
+ showpem("Child", cert, "x509")
for x in tree.getiterator(tag("parent")):
- print "Parent URI:", x.get("service_uri")
- cert = x.findtext(tag("bpki_certificate"))
- if cert:
- showpem("Parent", cert, "x509")
+ print "Parent URI:", x.get("service_uri")
+ cert = x.findtext(tag("bpki_certificate"))
+ if cert:
+ showpem("Parent", cert, "x509")
ca = tree.findtext(tag("bpki_ca_certificate"))
if ca:
- showpem("CA", ca, "x509")
+ showpem("CA", ca, "x509")
bsc = tree.findtext(tag("bpki_bsc_certificate"))
if bsc:
- showpem("BSC EE", bsc, "x509")
+ showpem("BSC EE", bsc, "x509")
repo = tree.findtext(tag("bpki_repository_certificate"))
if repo:
- showpem("Repository", repo, "x509")
+ showpem("Repository", repo, "x509")
req = tree.findtext(tag("bpki_bsc_pkcs10"))
if req:
- showpem("BSC EE", req, "req")
+ showpem("BSC EE", req, "req")
crl = tree.findtext(tag("bpki_crl"))
if crl:
- showpem("CA", crl, "crl")
+ showpem("CA", crl, "crl")
diff --git a/ca/tests/old_irdbd.py b/ca/tests/old_irdbd.py
index d66e683e..d26c3476 100644
--- a/ca/tests/old_irdbd.py
+++ b/ca/tests/old_irdbd.py
@@ -15,5 +15,5 @@
# PERFORMANCE OF THIS SOFTWARE.
if __name__ == "__main__":
- import rpki.old_irdbd
- rpki.old_irdbd.main()
+ import rpki.old_irdbd
+ rpki.old_irdbd.main()
diff --git a/ca/tests/publication-control-protocol-samples.xml b/ca/tests/publication-control-protocol-samples.xml
new file mode 100644
index 00000000..e094f3f6
--- /dev/null
+++ b/ca/tests/publication-control-protocol-samples.xml
@@ -0,0 +1,155 @@
+<!-- -*- SGML -*-
+ - $Id$
+ -
+ - Copyright (C) 2008 American Registry for Internet Numbers ("ARIN")
+ -
+ - Permission to use, copy, modify, and distribute this software for any
+ - purpose with or without fee is hereby granted, provided that the above
+ - copyright notice and this permission notice appear in all copies.
+ -
+ - THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH
+ - REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+ - AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT,
+ - INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+ - LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+ - OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+ - PERFORMANCE OF THIS SOFTWARE.
+ -
+ -
+ - This is a collection of sample publication protocol PDU samples
+ - to use as test cases for the publication protocol RelaxNG schema.
+ -->
+
+<completely_gratuitous_wrapper_element_to_let_me_run_this_through_xmllint>
+
+ <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <client action="create" client_handle="3" base_uri="rsync://wombat.invalid/">
+ <bpki_cert>
+ MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
+ BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
+ MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
+ b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S
+ G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G
+ Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC
+ DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM
+ uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP
+ 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ
+ diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4
+ ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq
+ hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg
+ cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4
+ XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm
+ sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
+ YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
+ 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
+ </bpki_cert>
+ </client>
+ </msg>
+
+ <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <client action="create" client_handle="3"/>
+ </msg>
+
+ <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <client action="set" client_handle="3">
+ <bpki_glue>
+ MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
+ BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
+ MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
+ b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S
+ G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G
+ Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC
+ DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM
+ uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP
+ 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ
+ diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4
+ ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq
+ hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg
+ cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4
+ XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm
+ sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
+ YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
+ 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
+ </bpki_glue>
+ </client>
+ </msg>
+
+ <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <client action="set" client_handle="3"/>
+ </msg>
+
+ <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <client action="get" client_handle="3"/>
+ </msg>
+
+ <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <client action="get" client_handle="3" base_uri="rsync://wombat.invalid/">
+ <bpki_cert>
+ MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
+ BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
+ MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
+ b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S
+ G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G
+ Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC
+ DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM
+ uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP
+ 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ
+ diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4
+ ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq
+ hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg
+ cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4
+ XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm
+ sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
+ YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
+ 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
+ </bpki_cert>
+ </client>
+ </msg>
+
+ <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <client action="list"/>
+ </msg>
+
+ <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <client action="list" client_handle="3">
+ <bpki_cert>
+ MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
+ BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
+ MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
+ b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S
+ G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G
+ Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC
+ DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM
+ uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP
+ 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ
+ diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4
+ ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq
+ hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg
+ cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4
+ XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm
+ sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
+ YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
+ 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
+ </bpki_cert>
+ </client>
+ </msg>
+
+ <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <client action="destroy" client_handle="3"/>
+ </msg>
+
+ <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <client action="destroy" client_handle="3"/>
+ </msg>
+
+ <!-- === -->
+
+ <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <report_error error_code="your_hair_is_on_fire">text string</report_error>
+ </msg>
+
+ <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-control/">
+ <report_error error_code="your_hair_is_on_fire"/>
+ </msg>
+
+</completely_gratuitous_wrapper_element_to_let_me_run_this_through_xmllint>
diff --git a/ca/tests/publication-protocol-samples.xml b/ca/tests/publication-protocol-samples.xml
index 96b095a7..6d0a99a9 100644
--- a/ca/tests/publication-protocol-samples.xml
+++ b/ca/tests/publication-protocol-samples.xml
@@ -1,370 +1,107 @@
<!-- -*- SGML -*-
- - $Id$
+ - $Id$
-
- - Copyright (C) 2008 American Registry for Internet Numbers ("ARIN")
+ - Sample PDUs for RPKI publication protocol, from current I-D.
-
- - Permission to use, copy, modify, and distribute this software for any
- - purpose with or without fee is hereby granted, provided that the above
- - copyright notice and this permission notice appear in all copies.
+ - Copyright (c) 2014 IETF Trust and the persons identified as authors
+ - of the code. All rights reserved.
-
- - THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH
- - REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
- - AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT,
- - INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
- - LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
- - OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
- - PERFORMANCE OF THIS SOFTWARE.
+ - Redistribution and use in source and binary forms, with or without
+ - modification, are permitted provided that the following conditions
+ - are met:
-
+ - * Redistributions of source code must retain the above copyright
+ - notice, this list of conditions and the following disclaimer.
-
- - This is a collection of sample publication protocol PDU samples
- - to use as test cases for the publication protocol RelaxNG schema.
+ - * Redistributions in binary form must reproduce the above copyright
+ - notice, this list of conditions and the following disclaimer in
+ - the documentation and/or other materials provided with the
+ - distribution.
+ -
+ - * Neither the name of Internet Society, IETF or IETF Trust, nor the
+ - names of specific contributors, may be used to endorse or promote
+ - products derived from this software without specific prior written
+ - permission.
+ -
+ - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ - FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ - COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ - INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ - BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ - LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ - CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ - LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ - ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ - POSSIBILITY OF SUCH DAMAGE.
-->
<completely_gratuitous_wrapper_element_to_let_me_run_this_through_xmllint>
- <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="query" version="1">
- <config action="set">
- <bpki_crl>
- MIIBezBlAgEBMA0GCSqGSIb3DQEBCwUAMCMxITAfBgNVBAMTGFRlc3QgQ2VydGlm
- aWNhdGUgcHViZCBUQRcNMDgwNjAyMjE0OTQ1WhcNMDgwNzAyMjE0OTQ1WqAOMAww
- CgYDVR0UBAMCAQEwDQYJKoZIhvcNAQELBQADggEBAFWCWgBl4ljVqX/CHo+RpqYt
- vmKMnjPVflMXUB7i28RGP4DAq4l7deDU7Q82xEJyE4TXMWDWAV6UG6uUGum0VHWO
- cj9ohqyiZUGfOsKg2hbwkETm8sAENOsi1yNdyKGk6jZ16aF5fubxQqZa1pdGCSac
- 1/ZYC5sLLhEz3kmz+B9z9mXFVc5TgAh4dN3Gy5ftF8zZAFpDGnS4biCnRVqhGv6R
- 0Lh/5xmii+ZU6kNDhbeMsjJg+ZOmtN+wMeHSIbjiy0WuuaZ3k2xSh0C94anrHBZA
- vvCRhbazjR0Ef5OMZ5lcllw3uO8IHuoisHKkehy4Y0GySdj98fV+OuiRTH9vt/M=
- </bpki_crl>
- </config>
- </msg>
-
- <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="reply" version="1">
- <config action="set"/>
- </msg>
-
- <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="query" version="1">
- <config action="get"/>
- </msg>
-
- <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="reply" version="1">
- <config action="get">
- <bpki_crl>
- MIIBezBlAgEBMA0GCSqGSIb3DQEBCwUAMCMxITAfBgNVBAMTGFRlc3QgQ2VydGlm
- aWNhdGUgcHViZCBUQRcNMDgwNjAyMjE0OTQ1WhcNMDgwNzAyMjE0OTQ1WqAOMAww
- CgYDVR0UBAMCAQEwDQYJKoZIhvcNAQELBQADggEBAFWCWgBl4ljVqX/CHo+RpqYt
- vmKMnjPVflMXUB7i28RGP4DAq4l7deDU7Q82xEJyE4TXMWDWAV6UG6uUGum0VHWO
- cj9ohqyiZUGfOsKg2hbwkETm8sAENOsi1yNdyKGk6jZ16aF5fubxQqZa1pdGCSac
- 1/ZYC5sLLhEz3kmz+B9z9mXFVc5TgAh4dN3Gy5ftF8zZAFpDGnS4biCnRVqhGv6R
- 0Lh/5xmii+ZU6kNDhbeMsjJg+ZOmtN+wMeHSIbjiy0WuuaZ3k2xSh0C94anrHBZA
- vvCRhbazjR0Ef5OMZ5lcllw3uO8IHuoisHKkehy4Y0GySdj98fV+OuiRTH9vt/M=
- </bpki_crl>
- </config>
- </msg>
-
- <!-- === -->
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <client action="create" client_handle="3" base_uri="rsync://wombat.invalid/">
- <bpki_cert>
- MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
- BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
- MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
- b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S
- G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G
- Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC
- DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM
- uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP
- 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ
- diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4
- ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq
- hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg
- cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4
- XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm
- sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
- YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
- 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cert>
- </client>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <client action="create" client_handle="3"/>
- </msg>
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <client action="set" client_handle="3">
- <bpki_glue>
- MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
- BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
- MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
- b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S
- G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G
- Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC
- DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM
- uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP
- 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ
- diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4
- ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq
- hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg
- cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4
- XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm
- sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
- YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
- 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_glue>
- </client>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <client action="set" client_handle="3"/>
- </msg>
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <client action="get" client_handle="3"/>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <client action="get" client_handle="3" base_uri="rsync://wombat.invalid/">
- <bpki_cert>
- MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
- BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
- MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
- b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S
- G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G
- Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC
- DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM
- uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP
- 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ
- diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4
- ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq
- hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg
- cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4
- XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm
- sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
- YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
- 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cert>
- </client>
- </msg>
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <client action="list"/>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <client action="list" client_handle="3">
- <bpki_cert>
- MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV
- BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN
- MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS
- b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S
- G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G
- Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC
- DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM
- uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP
- 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ
- diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4
- ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq
- hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg
- cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4
- XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm
- sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH
- YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq
- 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ==
- </bpki_cert>
- </client>
- </msg>
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <client action="destroy" client_handle="3"/>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <client action="destroy" client_handle="3"/>
- </msg>
-
- <!-- === -->
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <certificate action="publish" uri="rsync://wombat.invalid/testbed/RIR/1/j7ghjwblCrcCp9ltyPDNzYKPfxc.cer">
- MIIE+jCCA+KgAwIBAgIBDTANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEyhERjRBODAxN0U2
- NkE5RTkxNzJFNDYxMkQ4Q0Y0QzgzRjIzOERFMkEzMB4XDTA4MDUyMjE4MDUxMloXDTA4MDUy
- NDE3NTQ1M1owMzExMC8GA1UEAxMoOEZCODIxOEYwNkU1MEFCNzAyQTdEOTZEQzhGMENEQ0Q4
- MjhGN0YxNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMeziKp0k5nP7v6SZoNs
- XIMQYRgNtC6Fr/9Xm/1yQHomiPqHUk47rHhGojYiK5AhkrwoYhkH4UjJl2iwklDYczXuaBU3
- F5qrKlZ4aZnjIxdlP7+hktVpeApL6yuJTUAYeC3UIxnLDVdD6phydZ/FOQluffiNDjzteCCv
- oyOUatqt8WB+oND6LToHp028g1YUYLHG6mur0dPdcHOVXLSmUDuZ1HDz1nDuYvIVKjB/MpH9
- aW9XeaQ6ZFIlZVPwuuvI2brR+ThH7Gv27GL/o8qFdC300VQfoTZ+rKPGDE8K1cI906BL4kiw
- x9z0oiDcE96QCz+B0vsjc9mGaA1jgAxlXWsCAwEAAaOCAhcwggITMB0GA1UdDgQWBBSPuCGP
- BuUKtwKn2W3I8M3Ngo9/FzAfBgNVHSMEGDAWgBTfSoAX5mqekXLkYS2M9Mg/I43iozBVBgNV
- HR8ETjBMMEqgSKBGhkRyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rlc3RiZWQvUklSLzEvMzBx
- QUYtWnFucEZ5NUdFdGpQVElQeU9ONHFNLmNybDBFBggrBgEFBQcBAQQ5MDcwNQYIKwYBBQUH
- MAKGKXJzeW5jOi8vbG9jYWxob3N0OjQ0MDAvdGVzdGJlZC9XT01CQVQuY2VyMBgGA1UdIAEB
- /wQOMAwwCgYIKwYBBQUHDgIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwgZsG
- CCsGAQUFBwELBIGOMIGLMDQGCCsGAQUFBzAFhihyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rl
- c3RiZWQvUklSL1IwLzEvMFMGCCsGAQUFBzAKhkdyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rl
- c3RiZWQvUklSL1IwLzEvajdnaGp3YmxDcmNDcDlsdHlQRE56WUtQZnhjLm1uZjAaBggrBgEF
- BQcBCAEB/wQLMAmgBzAFAgMA/BUwPgYIKwYBBQUHAQcBAf8ELzAtMCsEAgABMCUDAwAKAzAO
- AwUAwAACAQMFAcAAAiAwDgMFAsAAAiwDBQDAAAJkMA0GCSqGSIb3DQEBCwUAA4IBAQCEhuH7
- jtI2PJY6+zwv306vmCuXhtu9Lr2mmRw2ZErB8EMcb5xypMrNqMoKeu14K2x4a4RPJkK4yATh
- M81FPNRsU5mM0acIRnAPtxjHvPME7PHN2w2nGLASRsZmaa+b8A7SSOxVcFURazENztppsolH
- eTpm0cpLItK7mNpudUg1JGuFo94VLf1MnE2EqARG1vTsNhel/SM/UvOArCCOBvf0Gz7kSuup
- DSZ7qx+LiDmtEsLdbGNQBiYPbLrDk41PHrxdx28qIj7ejZkRzNFw/3pi8/XK281h8zeHoFVu
- 6ghRPy5dbOA4akX/KG6b8XIx0iwPYdLiDbdWFbtTdPcXBauY
- </certificate>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <certificate action="publish" uri="rsync://wombat.invalid/testbed/RIR/1/j7ghjwblCrcCp9ltyPDNzYKPfxc.cer"/>
- </msg>
+ <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="query" version="3">
+ <!-- Zero or more PDUs -->
+ </msg>
+
+ <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="reply" version="3">
+ <!-- Zero or more PDUs -->
+ </msg>
+
+ <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="query" version="3">
+ <publish uri="rsync://wombat.example/Alice/blCrcCp9ltyPDNzYKPfxc.cer">
+ MIIE+jCCA+KgAwIBAgIBDTANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEyhE
+ RjRBODAxN0U2NkE5RTkxNzJFNDYxMkQ4Q0Y0QzgzRjIzOERFMkEzMB4XDTA4
+ MDUyMjE4MDUxMloXDTA4MDUyNDE3NTQ1M1owMzExMC8GA1UEAxMoOEZCODIx
+ OEYwNkU1MEFCNzAyQTdEOTZEQzhGMENEQ0Q4MjhGN0YxNzCCASIwDQYJKoZI
+ hvcNAQEBBQADggEPADCCAQoCggEBAMeziKp0k5nP7v6SZoNsXIMQYRgNtC6F
+ r/9Xm/1yQHomiPqHUk47rHhGojYiK5AhkrwoYhkH4UjJl2iwklDYczXuaBU3
+ F5qrKlZ4aZnjIxdlP7+hktVpeApL6yuJTUAYeC3UIxnLDVdD6phydZ/FOQlu
+ ffiNDjzteCCvoyOUatqt8WB+oND6LToHp028g1YUYLHG6mur0dPdcHOVXLSm
+ UDuZ1HDz1nDuYvIVKjB/MpH9aW9XeaQ6ZFIlZVPwuuvI2brR+ThH7Gv27GL/
+ o8qFdC300VQfoTZ+rKPGDE8K1cI906BL4kiwx9z0oiDcE96QCz+B0vsjc9mG
+ aA1jgAxlXWsCAwEAAaOCAhcwggITMB0GA1UdDgQWBBSPuCGPBuUKtwKn2W3I
+ 8M3Ngo9/FzAfBgNVHSMEGDAWgBTfSoAX5mqekXLkYS2M9Mg/I43iozBVBgNV
+ HR8ETjBMMEqgSKBGhkRyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rlc3RiZWQv
+ UklSLzEvMzBxQUYtWnFucEZ5NUdFdGpQVElQeU9ONHFNLmNybDBFBggrBgEF
+ BQcBAQQ5MDcwNQYIKwYBBQUHMAKGKXJzeW5jOi8vbG9jYWxob3N0OjQ0MDAv
+ dGVzdGJlZC9XT01CQVQuY2VyMBgGA1UdIAEB/wQOMAwwCgYIKwYBBQUHDgIw
+ DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwgZsGCCsGAQUFBwEL
+ BIGOMIGLMDQGCCsGAQUFBzAFhihyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rl
+ c3RiZWQvUklSL1IwLzEvMFMGCCsGAQUFBzAKhkdyc3luYzovL2xvY2FsaG9z
+ dDo0NDAwL3Rlc3RiZWQvUklSL1IwLzEvajdnaGp3YmxDcmNDcDlsdHlQRE56
+ WUtQZnhjLm1uZjAaBggrBgEFBQcBCAEB/wQLMAmgBzAFAgMA/BUwPgYIKwYB
+ BQUHAQcBAf8ELzAtMCsEAgABMCUDAwAKAzAOAwUAwAACAQMFAcAAAiAwDgMF
+ AsAAAiwDBQDAAAJkMA0GCSqGSIb3DQEBCwUAA4IBAQCEhuH7jtI2PJY6+zwv
+ 306vmCuXhtu9Lr2mmRw2ZErB8EMcb5xypMrNqMoKeu14K2x4a4RPJkK4yATh
+ M81FPNRsU5mM0acIRnAPtxjHvPME7PHN2w2nGLASRsZmaa+b8A7SSOxVcFUR
+ azENztppsolHeTpm0cpLItK7mNpudUg1JGuFo94VLf1MnE2EqARG1vTsNhel
+ /SM/UvOArCCOBvf0Gz7kSuupDSZ7qx+LiDmtEsLdbGNQBiYPbLrDk41PHrxd
+ x28qIj7ejZkRzNFw/3pi8/XK281h8zeHoFVu6ghRPy5dbOA4akX/KG6b8XIx
+ 0iwPYdLiDbdWFbtTdPcXBauY
+ </publish>
+ </msg>
+
+ <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="reply" version="3">
+ <publish uri="rsync://wombat.example/Alice/blCrcCp9ltyPDNzYKPfxc.cer"/>
+ </msg>
+
+ <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="reply" version="3">
+ <report_error error_code="your_hair_is_on_fire">
+ Shampooing with sterno again, are we?
+ </report_error>
+ </msg>
+
+ <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="reply" version="3">
+ <report_error error_code="your_hair_is_on_fire"/>
+ </msg>
+
+ <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="query" version="3">
+ <withdraw uri="rsync://wombat.example/Alice/blCrcCp9ltyPDNzYKPfxc.cer" hash="deadf00d"/>
+ </msg>
+
+ <msg xmlns="http://www.hactrn.net/uris/rpki/publication-spec/" type="reply" version="3">
+ <withdraw uri="rsync://wombat.example/Alice/blCrcCp9ltyPDNzYKPfxc.cer"/>
+ </msg>
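+
+ <!-- Note on the "hash" attribute in the publish/withdraw queries above:
+    - "deadf00d" is just a dummy value; a real query is expected to carry
+    - the hash of the object currently published at that URI, so the
+    - server can detect attempts to overwrite or withdraw the wrong
+    - object. -->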
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <certificate action="withdraw" uri="rsync://wombat.invalid/testbed/RIR/1/j7ghjwblCrcCp9ltyPDNzYKPfxc.cer"/>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <certificate action="withdraw" uri="rsync://wombat.invalid/testbed/RIR/1/j7ghjwblCrcCp9ltyPDNzYKPfxc.cer"/>
- </msg>
-
- <!-- === -->
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <crl action="publish" uri="rsync://wombat.invalid/testbed/RIR/1/30qAF-ZqnpFy5GEtjPTIPyON4qM.crl">
- MIIBwzCBrAIBATANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEyhERjRBODAxN0U2NkE5RTkx
- NzJFNDYxMkQ4Q0Y0QzgzRjIzOERFMkEzFw0wODA1MjIxODA0MTZaFw0wODA1MjIxODA1MTZa
- MBQwEgIBAhcNMDgwNTIyMTc1ODQwWqAvMC0wHwYDVR0jBBgwFoAU30qAF+ZqnpFy5GEtjPTI
- PyON4qMwCgYDVR0UBAMCAQYwDQYJKoZIhvcNAQELBQADggEBAKkM0Fb/pJpHVHWZyjp4wojH
- W2KkvA/DFtBiz3moxocSnkDVP3QI19uVvqdC6nH3hJyFmsAMwULR0f1XU/V4j+X+FqYEl6Nv
- p8zAEPIB4r8xbEFs7udRwXRAjkJmOQbv9aomF2i+d7jpTFVJxShZWOgsoGEhIy/aktKQrOIR
- c4ZDrXpQwXVj2Y7+cGVfQ4gvnPOdlyLcnNovoegazATvA3EcidBNPWRg7XTCz0LVBEB7JgPd
- nNyXRg35HdMEHBl7U9uUQJXP7S02oaQ1ehNDMfaJPgBBpQtAnM1lIzJfevd9+e4ywGsRpxAV
- 8wxTXSPd1jwuKtS0kwrgsrQ8Ya85xUE=
- </crl>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <crl action="publish" uri="rsync://wombat.invalid/testbed/RIR/1/30qAF-ZqnpFy5GEtjPTIPyON4qM.crl"/>
- </msg>
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <crl action="withdraw" uri="rsync://wombat.invalid/testbed/RIR/1/30qAF-ZqnpFy5GEtjPTIPyON4qM.crl"/>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <crl action="withdraw" uri="rsync://wombat.invalid/testbed/RIR/1/30qAF-ZqnpFy5GEtjPTIPyON4qM.crl"/>
- </msg>
-
- <!-- === -->
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <manifest action="publish" uri="rsync://wombat.invalid/testbed/RIR/R0/1/j7ghjwblCrcCp9ltyPDNzYKPfxc.mft">
- MIIHCgYJKoZIhvcNAQcCoIIG+zCCBvcCAQMxDTALBglghkgBZQMEAgEwggEeBgsqhkiG9w0B
- CRABGqCCAQ0EggEJMIIBBQIBEhgPMjAwODA1MjIxODA1MTVaGA8yMDA4MDUyMjE4MDYxNVoG
- CWCGSAFlAwQCATCB0jBEFh9ZbTVUTzRJYnlDb0pNZ3E2R2o4dG41Mng5U0UuY2VyAyEA4L8Z
- WMyuhOx+o6kUfsRR++QjSaRaATy4UOeVtjvZVqYwRBYfWnRxbjB3NEVFbU9hclAzQmd1SUY3
- MDhhNTM4LmNlcgMhAGQI1gYJotxWmwzcmpLNFZJ656uWOjcPYANlbNz80xm8MEQWH2xxa1Vx
- RHEwMDBESW9ZVjlybXdLTGdrN2F6by5jZXIDIQB7jRAEpkPvc4s4PX9vDvnTifj3BIE145FO
- 1ne2kEejVqCCBBEwggQNMIIC9aADAgECAgEFMA0GCSqGSIb3DQEBCwUAMDMxMTAvBgNVBAMT
- KDhGQjgyMThGMDZFNTBBQjcwMkE3RDk2REM4RjBDRENEODI4RjdGMTcwHhcNMDgwNTIyMTc1
- NzQ5WhcNMDgwNTI0MTc1NDUzWjAzMTEwLwYDVQQDEyhERkRBMjMyMUJENEVCMDNFQTE1RkUy
- N0NGRkRGMEFGRkU1QjBFNjY4MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2/Gk
- AHW5pDqye0+TvUp7sl0rVgmTfeHpVp18ypxvuovogVJgkjEtBEikfaFU0646wYD6JM6IJFJX
- lWLWd7bVmhkWViKuZL0VmT2wpUToNHCLUGUQUVVX8R7oSHFdTArv2AqH+6yt0LmczDH1y2M6
- 2Tgkz9wZ9ryyuPx3VX4PkHzUMlkGFICj1fvyXkcAu8jBaxR9UME1c413TPaMi6lMh1HUmtVN
- LJMP5+/SnwEAW/Z3dPClCFIgQXK3nAKPVzAIwADEiqhK7cSchhO7ikI1CVt0XzG4n7oaILc3
- Hq/DAxyiutw5GlkUlKPri2YJzJ3+H4P+TveSa/b02fVA5csm/QIDAQABo4IBKjCCASYwHQYD
- VR0OBBYEFN/aIyG9TrA+oV/ifP/fCv/lsOZoMB8GA1UdIwQYMBaAFI+4IY8G5Qq3AqfZbcjw
- zc2Cj38XMFgGA1UdHwRRME8wTaBLoEmGR3JzeW5jOi8vbG9jYWxob3N0OjQ0MDAvdGVzdGJl
- ZC9SSVIvUjAvMS9qN2doandibENyY0NwOWx0eVBETnpZS1BmeGMuY3JsMGAGCCsGAQUFBwEB
- BFQwUjBQBggrBgEFBQcwAoZEcnN5bmM6Ly9sb2NhbGhvc3Q6NDQwMC90ZXN0YmVkL1JJUi8x
- L2o3Z2hqd2JsQ3JjQ3A5bHR5UEROellLUGZ4Yy5jZXIwGAYDVR0gAQH/BA4wDDAKBggrBgEF
- BQcOAjAOBgNVHQ8BAf8EBAMCB4AwDQYJKoZIhvcNAQELBQADggEBADpsE9HfgVTgmX1WeJTE
- fm87CXuOoGH85RFiAngSt5kR4gYCyadklOZ7Eta+ERUZVu4tcKO6sJOTuHPfVrAvR0VpgH+j
- PvXboYWSfwJdi00BC28ScrVM2zarA7B10+J6Oq8tbFlAyVBkrbuPet/axmndBtGWhrBTynGl
- nc/5L371Lxy6CrOYqXO0Qx3SrOKaailAe3zTIpHQeACqnPdL00zIBw/hVy/VNaH1wy+FmhAz
- TsmsQUrMyovJcu/ry5w0KHlP8BTnqfykikCWR+Lw0VQHmpJGAbtrmsOeIbfLY1zl7A81lDAl
- AG/ZH1DUdDOUIXMLHWur+D2rwjp7RL16LHYxggGqMIIBpgIBA4AU39ojIb1OsD6hX+J8/98K
- /+Ww5mgwCwYJYIZIAWUDBAIBoGswGgYJKoZIhvcNAQkDMQ0GCyqGSIb3DQEJEAEaMBwGCSqG
- SIb3DQEJBTEPFw0wODA1MjIxODA1MTVaMC8GCSqGSIb3DQEJBDEiBCBj/GjEQw3LgKPf5DTz
- 8eu1fcp6/cQjqqne6ZqFkF42azANBgkqhkiG9w0BAQEFAASCAQBOY0uHNMwy/o1nFANSgha5
- PZxt8fz+wTrbeomCb+lxqQKq1clcSiQORVGc8NmqC8sS5OR3eTw/3qnK9yPHxz2UQ4hn1pBa
- +Zy5veM61qMaXCw6w98EyNcvUfA1AkezAjkabfHQDs3o4Ezh49thXXyRcBoF+O6Lmi+LZbT2
- 4jvfFbaXW9zsb6/DaoDkeHnlk+YYgfSP4wOnkK5uqxtDW8QpMPq3GGdIp0oJDkzEdj7VsWIL
- 9JP2mxxL8fTPVUyAPOmURYwYDXqhke2O9eVDiCYhrEfB8/84Rint4Cj8n5aCujnAtqtwxHpD
- 0NRYO/V1MjhG+ARy1vRH1Dm0r92RBam3
- </manifest>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <manifest action="publish" uri="rsync://wombat.invalid/testbed/RIR/R0/1/j7ghjwblCrcCp9ltyPDNzYKPfxc.mft"/>
- </msg>
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <manifest action="withdraw" uri="rsync://wombat.invalid/testbed/RIR/R0/1/j7ghjwblCrcCp9ltyPDNzYKPfxc.mft"/>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <manifest action="withdraw" uri="rsync://wombat.invalid/testbed/RIR/R0/1/j7ghjwblCrcCp9ltyPDNzYKPfxc.mft"/>
- </msg>
-
- <!-- === -->
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <roa action="publish" uri="rsync://wombat.invalid/testbed/RIR/R0/1/lqkUqDq000DIoYV9rmwKLgk7azo.roa">
- MIIGmwYJKoZIhvcNAQcCoIIGjDCCBogCAQMxDTALBglghkgBZQMEAgEwKgYLKoZIhvcNAQkQ
- ARigGwQZMBcCAgKaMBEwDwQCAAEwCTAHAwUACgMALKCCBJgwggSUMIIDfKADAgECAgEJMA0G
- CSqGSIb3DQEBCwUAMDMxMTAvBgNVBAMTKDhGQjgyMThGMDZFNTBBQjcwMkE3RDk2REM4RjBD
- RENEODI4RjdGMTcwHhcNMDgwNTIyMTc1ODI0WhcNMDgwNTI0MTc1NDUzWjAzMTEwLwYDVQQD
- Eyg5NkE5MTRBODNBQjREMzQwQzhBMTg1N0RBRTZDMEEyRTA5M0I2QjNBMIIBIjANBgkqhkiG
- 9w0BAQEFAAOCAQ8AMIIBCgKCAQEApoK50BjW5bcF4gsdaYhndtVADZvQk3RCsvuqDElF6uLi
- 9BYQq/NHyDOIMyJtvCmzjdv3Y135n1sNO7YvssqHlt7dMfCQTD5ND1GpFnQLdWP7stWM5AbO
- nJV6+PtDITUA/QHOli7Do0YCUgR6G+1QJsMu0DK+TRSzBJ6WP7WIYOBOOg3y/NKc1rkWhS1Q
- dcQepbHgQYZHzzpjNDR6+oYVuhuUEWx1P6O4pv/p+tpE0SDua7jBjMywIYHkPQBecf2IX1RU
- WNojB9dJlnRx5YUUneP2SvF2MrmdDbclgzwhf6alqD2OjiMuoBOG8yeTKcuhzCMnrFAklbst
- 6x3Rnq9BswIDAQABo4IBsTCCAa0wHQYDVR0OBBYEFJapFKg6tNNAyKGFfa5sCi4JO2s6MB8G
- A1UdIwQYMBaAFI+4IY8G5Qq3AqfZbcjwzc2Cj38XMFgGA1UdHwRRME8wTaBLoEmGR3JzeW5j
- Oi8vbG9jYWxob3N0OjQ0MDAvdGVzdGJlZC9SSVIvUjAvMS9qN2doandibENyY0NwOWx0eVBE
- TnpZS1BmeGMuY3JsMGAGCCsGAQUFBwEBBFQwUjBQBggrBgEFBQcwAoZEcnN5bmM6Ly9sb2Nh
- bGhvc3Q6NDQwMC90ZXN0YmVkL1JJUi8xL2o3Z2hqd2JsQ3JjQ3A5bHR5UEROellLUGZ4Yy5j
- ZXIwGAYDVR0gAQH/BA4wDDAKBggrBgEFBQcOAjAOBgNVHQ8BAf8EBAMCB4AwYwYIKwYBBQUH
- AQsEVzBVMFMGCCsGAQUFBzALhkdyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rlc3RiZWQvUklS
- L1IwLzEvbHFrVXFEcTAwMERJb1lWOXJtd0tMZ2s3YXpvLnJvYTAgBggrBgEFBQcBBwEB/wQR
- MA8wDQQCAAEwBwMFAAoDACwwDQYJKoZIhvcNAQELBQADggEBAL8iHwsyGOYhhIf3nVuL361y
- TOJSP8SR0mtQLHULPl+GkYk+5MRNWtL8ucTXFvniYJtOCXEGGEIO9eDXvkQIXQSz/qbF9URQ
- fuf38ghRza257syVhal6UHTgCFYuRIO9CUjcU1vkWUxH05BBIHlYdtlIQbAG/mRsCPCEgSmG
- bbQaomGlUOqmJMlKxLLcoAtz2vDrwVotgHyfS5h2mgINFjnlLcNLTci+sfs7/aQAkDYx7K98
- se/ZlMorvGkFNhHoOTcGIrWkYsfkbTygVwWRm278PaB3o4449Kvsg/gb8BZeHXRs68cr5Mcf
- jP7Q6jeypjTgDBnwb1yzoJIKWszFuSgxggGqMIIBpgIBA4AUlqkUqDq000DIoYV9rmwKLgk7
- azowCwYJYIZIAWUDBAIBoGswGgYJKoZIhvcNAQkDMQ0GCyqGSIb3DQEJEAEYMBwGCSqGSIb3
- DQEJBTEPFw0wODA1MjIxNzU4MjRaMC8GCSqGSIb3DQEJBDEiBCDCyf9v9Wed515TRp2WwnyM
- 1rk6dB///X+aqIym2e9jdTANBgkqhkiG9w0BAQEFAASCAQAFvzrHeRPW+wn4WSyoyBEq0zKS
- Cyh5tu1qTR0NHs6Rr/p8Pk81P1HQLND/U+znJZKLWlO2niEHUXPIicPDYchbj8ApH9VxKA+1
- lCWllOzFAsYyZFr3/VNs9pVp2eT4F9eEYBrBVDSNrD72MMTlWm1T5MEXqltTJJOCKzUEX96x
- 91iW6A+4erop7S8hpCnxqkTin4bFVreqYcGc4CC4bh+L9pPqJnURcEk7Qeu/WEHQBm38voB4
- S11qRZNrJMQ99oiJR7hXDIBm66HjGqoUL2gPCfpgJEVVnM9pVv2k889z4eTTck2Qj54gga2W
- Xkvw4Je420aDx88s9T2+PqXcbZ4g
- </roa>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <roa action="publish" uri="rsync://wombat.invalid/testbed/RIR/R0/1/lqkUqDq000DIoYV9rmwKLgk7azo.roa"/>
- </msg>
-
- <msg version="1" type="query" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <roa action="withdraw" uri="rsync://wombat.invalid/testbed/RIR/R0/1/lqkUqDq000DIoYV9rmwKLgk7azo.roa"/>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <roa action="withdraw" uri="rsync://wombat.invalid/testbed/RIR/R0/1/lqkUqDq000DIoYV9rmwKLgk7azo.roa"/>
- </msg>
-
- <!-- === -->
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <report_error error_code="your_hair_is_on_fire">text string</report_error>
- </msg>
-
- <msg version="1" type="reply" xmlns="http://www.hactrn.net/uris/rpki/publication-spec/">
- <report_error error_code="your_hair_is_on_fire"/>
- </msg>
</completely_gratuitous_wrapper_element_to_let_me_run_this_through_xmllint>
diff --git a/ca/tests/rrdp-samples.xml b/ca/tests/rrdp-samples.xml
new file mode 100644
index 00000000..966d9887
--- /dev/null
+++ b/ca/tests/rrdp-samples.xml
@@ -0,0 +1,81 @@
+<!-- -*- SGML -*-
+ - $Id$
+ -
+ - This is a collection of sample RRDP PDUs to use as test
+ - cases for the RRDP RelaxNG schema.
+ -
+ - Need to figure out whose copyright should be on these examples.
+ - BSD in any case, so it makes little practical difference; we just
+ - need to be sure we give proper credit. Might be RIPE, might be IETF
+ - Trust, might be us for derivative work. Slap ours on for the
+ - moment, fix when we figure this out.
+ -
+ - Copyright (C) 2014 Dragon Research Labs ("DRL")
+ -
+ - Permission to use, copy, modify, and distribute this software for any
+ - purpose with or without fee is hereby granted, provided that the above
+ - copyright notice and this permission notice appear in all copies.
+ -
+ - THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH
+ - REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+ - AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT,
+ - INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+ - LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+ - OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+ - PERFORMANCE OF THIS SOFTWARE.
+ -->
+
+<completely_gratuitous_wrapper_element_to_let_me_run_this_through_xmllint>
+
+ <!-- Notification file: lists the current snapshot and deltas -->
+
+ <notification xmlns="http://www.ripe.net/rpki/rrdp" version="1" session_id="d9f6dc91-0394-40b9-9663-66aef4bb623a" serial="185">
+ <snapshot uri="http://host.example/d9f6dc91-0394-40b9-9663-66aeb623a/snapshot/202.xml" hash="279b79fd8389e20585f26735ee70e0e4d4b8af23bb2e2e611c70e92d2433edea"/>
+ <delta serial="183" uri="http://host.example/d9f6c91-0394-40b9-9663-66aeb623a/deltas/183.xml" hash="a2d56ec180f2dde2a46bf90565932e25829b852a0b43107d5de6e41394c29100"/>
+ <delta serial="184" uri="http://host.example/d9f6c91-0394-40b9-9663-66aeb623a/deltas/184.xml" hash="a2d56ec180f2dde2a46b2e0565932e25829b852a0b43107d5de6e41394c29200"/>
+ <delta serial="185" uri="http://host.example/d9f6c91-0394-40b9-9663-66aeb623a/deltas/185.xml" hash="a2d56ec180f2dde2a46b2e0565932e25829b852a0b43107d5de6e41394c29201"/>
+ </notification>
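+
+ <!-- The hash attributes above let a relying party verify the snapshot
+    - and delta files it fetches against the notification file, while
+    - session_id and serial tie the three file types together; the URIs
+    - and hash values shown here are made-up sample data. -->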
+
+ <!-- Snapshot segment: think DNS AXFR -->
+
+ <snapshot version="1" xmlns="http://www.ripe.net/rpki/rrdp" session_id="d9f6dc91-0394-40b9-9663-66aef4bb623a" serial="1">
+ <publish uri="http://host.example/foo/bar/cer1.cer">
+ MIIE+jCCA+KgAwIBAgIBDTANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQD
+ jRBODAxN0U2NkE5RTkxNzJFNDYxMkQ4Q0Y0QzgzRjIzOERFMkEzMB4XE
+ h8zeHoFVu6ghRPy5dbOA4akX/KG6b8XIx0iwPYdLiDbdWFbtTdPcXBau
+ </publish>
+ <publish uri="http://host.example/foo/bar/cer2.cer">
+ MIIE+jCCA+KgAwIBAgIBDTANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQD
+ h8zeHoFVu6ghRPy5dbOA4akX/KG6b8XIx0iwPYdLiDbdWFbtTdPcXBau
+ jRBODAxN0U2NkE5RTkxNzJFNDYxMkQ4Q0Y0QzgzRjIzOERFMkEzMB4XD
+ </publish>
+ <publish uri="http://host.example/foo/bar/cer3.cer">
+ MIIE+jCCA+KgAwIBAgIBDTANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQD
+ h8zeHoFVu6ghRPy5dbOA4akX/KG6b8XIx0iwPYdLiDbdWFbtTdPcXBau
+ jRBODAxN0U2NkE5RTkxNzJFNDYxMkQ4Q0Y0QzgzRjIzOERFMkEzMB4XD
+ </publish>
+ </snapshot>
+
+ <!-- Delta segment: think DNS IXFR -->
+
+ <delta version="1" xmlns="http://www.ripe.net/rpki/rrdp" session_id="d9f6dc91-0394-40b9-9663-66aef4bb623a" serial="3">
+ <publish uri="http://host.example/foo/bar/cer1.cer">
+ MIIE+jCCA+KgAwIBAgIBDTANBgkqhkiG9w0BAQsFADAzMTEw
+ jRBODAxN0U2NkE5RTkxNzJFNDYxMkQ4Q0Y0QzgzRjIzOERFM
+ h8zeHoFVu6ghRPy5dbOA4akX/KG6b8XIx0iwPYdLiDbdWFbt
+ </publish>
+ <withdraw uri="http://host.example/foo/bar/cer1.cer" hash="deadf00d"/>
+ <publish uri="http://host.example/foo/bar/cer2.cer">
+ MIIE+jCCA+KgAwIBAgIBDTANBgkqhkiG9w0BAQsFADAzMTEw
+ h8zeHoFVu6ghRPy5dbOA4akX/KG6b8XIx0iwPYdLiDbdWFbt
+ jRBODAxN0U2NkE5RTkxNzJFNDYxMkQ4Q0Y0QzgzRjIzOERFM
+ </publish>
+ <publish uri="http://host.example/foo/bar/cer3.cer" hash="deadf00d">
+ MIIE+jCCA+KgAwIBAgIBDTANBgkqhkiG9w0BAQsFADAzMTEw
+ h8zeHoFVu6ghRPy5dbOA4akX/KG6b8XIx0iwPYdLiDbdWFbt
+ jRBODAxN0U2NkE5RTkxNzJFNDYxMkQ4Q0Y0QzgzRjIzOERFM
+ </publish>
+ <withdraw uri="http://host.example/foo/bar/cer4.cer" hash="deadf00d"/>
+ </delta>
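+
+ <!-- Within a delta, <publish> without a hash adds a new object,
+    - <publish> with a hash replaces the object whose current content has
+    - that hash, and <withdraw> (which always carries a hash) removes it;
+    - the "deadf00d" hashes are dummy values. -->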
+
+</completely_gratuitous_wrapper_element_to_let_me_run_this_through_xmllint>
diff --git a/ca/tests/smoketest.6.yaml b/ca/tests/smoketest.6.yaml
index e8d65433..e33e75c2 100644
--- a/ca/tests/smoketest.6.yaml
+++ b/ca/tests/smoketest.6.yaml
@@ -54,6 +54,9 @@ kids:
roa_request:
- asn: 64533
ipv6: 2001:db8::80/121
+ router_cert:
+ - router_id: 666
+ asn: 64533
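+ # router_cert entries drive issuance of BGPsec router EE certificates
+ # for this child (see the router_cert class and sync_sql() in
+ # smoketest.py).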
ghostbusters:
- |
BEGIN:VCARD
diff --git a/ca/tests/smoketest.py b/ca/tests/smoketest.py
index 32f11cc3..9d82c640 100644
--- a/ca/tests/smoketest.py
+++ b/ca/tests/smoketest.py
@@ -47,8 +47,7 @@ import rpki.http
import rpki.log
import rpki.left_right
import rpki.config
-import rpki.publication
-import rpki.async
+import rpki.publication_control
from rpki.mysql_import import MySQLdb
@@ -68,7 +67,7 @@ parser.add_argument("yaml_file", type = argparse.FileType("r"),
help = "YAML description of test network")
args = parser.parse_args()
-cfg = rpki.config.parser(args.config, "smoketest", allow_missing = True)
+cfg = rpki.config.parser(set_filename = args.config, section = "smoketest", allow_missing = True)
# Load the YAML script early, so we can report errors ASAP
@@ -77,13 +76,14 @@ yaml_script = [y for y in yaml.safe_load_all(args.yaml_file)]
# Define port allocator early, so we can use it while reading config
def allocate_port():
- """
- Allocate a TCP port number.
- """
- global base_port
- p = base_port
- base_port += 1
- return p
+ """
+ Allocate a TCP port number.
+ """
+
+ global base_port
+ p = base_port
+ base_port += 1
+ return p
# Most filenames in the following are relative to the working directory.
@@ -139,14 +139,14 @@ pubd_last_cms_time = None
ecdsa_params = None
class CantRekeyYAMLLeaf(Exception):
- """
- Can't rekey YAML leaf.
- """
+ """
+ Can't rekey YAML leaf.
+ """
class CouldntIssueBSCEECertificate(Exception):
- """
- Couldn't issue BSC EE certificate
- """
+ """
+ Couldn't issue BSC EE certificate
+ """
sql_conversions = MySQLdb.converters.conversions.copy()
sql_conversions.update({
@@ -154,195 +154,202 @@ sql_conversions.update({
MySQLdb.converters.FIELD_TYPE.DATETIME : rpki.sundial.datetime.DateTime_or_None })
def main():
- """
- Main program.
- """
-
- rpki.log.init(smoketest_name, argparse.Namespace(log_level = logging.DEBUG,
- log_handler = lambda: logging.StreamHandler(sys.stdout)))
- logger.info("Starting")
-
- rpki.http.http_client.timeout = rpki.sundial.timedelta(hours = 1)
-
- pubd_process = None
- rootd_process = None
- rsyncd_process = None
-
- rpki_sql = mangle_sql(rpki_sql_file)
- irdb_sql = mangle_sql(irdb_sql_file)
- pubd_sql = mangle_sql(pub_sql_file)
-
- logger.info("Initializing test directory")
-
- # Connect to test directory, creating it if necessary
- try:
- os.chdir(smoketest_dir)
- except OSError:
- os.makedirs(smoketest_dir)
- os.chdir(smoketest_dir)
-
- # Now that we're in the right directory, we can figure out whether
- # we have a private openssl executable to use
- global prog_openssl
- if not os.path.exists(prog_openssl):
- prog_openssl = "openssl"
-
- # Discard everything but keys, which take a while to generate.
- # Apparently os.walk() can't tell the difference between directories
- # and symlinks to directories, so we have to handle both.
- for root, dirs, files in os.walk(".", topdown = False):
- for fn in files:
- if not fn.endswith(".key"):
- os.remove(os.path.join(root, fn))
- for d in dirs:
- try:
- os.rmdir(os.path.join(root, d))
- except OSError, e:
- if e.errno == errno.ENOTDIR:
- os.remove(os.path.join(root, d))
- else:
- raise
-
- logger.info("Reading master YAML configuration")
- y = yaml_script.pop(0)
-
- logger.info("Constructing internal allocation database")
- db = allocation_db(y)
+ """
+ Main program.
+ """
- logger.info("Constructing BPKI keys and certs for rootd")
- setup_bpki_cert_chain(rootd_name, ee = ("RPKI",))
+ rpki.log.init(smoketest_name, argparse.Namespace(log_level = logging.DEBUG,
+ log_handler = lambda: logging.StreamHandler(sys.stdout)))
+ logger.info("Starting")
- logger.info("Constructing BPKI keys and certs for pubd")
- setup_bpki_cert_chain(pubd_name, ee = ("PUBD", "IRBE"))
+ rpki.http.http_client.timeout = rpki.sundial.timedelta(hours = 1)
+ pubd_process = None
+ rootd_process = None
+ rsyncd_process = None
- for a in db:
- a.setup_bpki_certs()
+ rpki_sql = mangle_sql(rpki_sql_file)
+ irdb_sql = mangle_sql(irdb_sql_file)
+ pubd_sql = mangle_sql(pub_sql_file)
- setup_publication(pubd_sql)
- setup_rootd(db.root, y.get("rootd", {}))
- setup_rsyncd()
- setup_rcynic()
+ logger.info("Initializing test directory")
- for a in db.engines:
- a.setup_conf_file()
- a.setup_sql(rpki_sql, irdb_sql)
- a.sync_sql()
+ # Connect to test directory, creating it if necessary
+ try:
+ os.chdir(smoketest_dir)
+ except OSError:
+ os.makedirs(smoketest_dir)
+ os.chdir(smoketest_dir)
+
+ # Now that we're in the right directory, we can figure out whether
+ # we have a private openssl executable to use
+ global prog_openssl
+ if not os.path.exists(prog_openssl):
+ prog_openssl = "openssl"
+
+ # Discard everything but keys, which take a while to generate.
+ # Apparently os.walk() can't tell the difference between directories
+ # and symlinks to directories, so we have to handle both.
+ for root, dirs, files in os.walk(".", topdown = False):
+ for fn in files:
+ if not fn.endswith(".key"):
+ os.remove(os.path.join(root, fn))
+ for d in dirs:
+ try:
+ os.rmdir(os.path.join(root, d))
+ except OSError, e:
+ if e.errno == errno.ENOTDIR:
+ os.remove(os.path.join(root, d))
+ else:
+ raise
+
+ logger.info("Reading master YAML configuration")
+ y = yaml_script.pop(0)
+
+ logger.info("Constructing internal allocation database")
+ db = allocation_db(y)
+
+ logger.info("Constructing BPKI keys and certs for rootd")
+ setup_bpki_cert_chain(rootd_name, ee = ("RPKI",))
+
+ logger.info("Constructing BPKI keys and certs for pubd")
+ setup_bpki_cert_chain(pubd_name, ee = ("PUBD", "IRBE"))
+
+
+ for a in db:
+ a.setup_bpki_certs()
+
+ setup_publication(pubd_sql, db.root.irdb_db_name)
+ setup_rootd(db.root, y.get("rootd", {}), db)
+ setup_rsyncd()
+ setup_rcynic()
- try:
+ for a in db.engines:
+ a.setup_conf_file()
+ a.setup_sql(rpki_sql, irdb_sql)
+ a.sync_sql()
- logger.info("Starting rootd")
- rootd_process = subprocess.Popen((prog_python, prog_rootd, "--foreground", "--log-stdout", "--log-level", "debug", "--config", rootd_name + ".conf"))
+ try:
- logger.info("Starting pubd")
- pubd_process = subprocess.Popen((prog_python, prog_pubd, "--foreground", "--log-stdout", "--log-level", "debug", "--config", pubd_name + ".conf") +
- (("-p", pubd_name + ".prof") if args.profile else ()))
+ logger.info("Starting rootd")
+ rootd_process = subprocess.Popen((prog_python, prog_rootd, "--foreground", "--log-stdout", "--log-level", "debug"),
+ env = dict(os.environ, RPKI_CONF = rootd_name + ".conf"))
- logger.info("Starting rsyncd")
- rsyncd_process = subprocess.Popen((prog_rsyncd, "--daemon", "--no-detach", "--config", rsyncd_name + ".conf"))
+ logger.info("Starting pubd")
+ pubd_process = subprocess.Popen((prog_python, prog_pubd, "--foreground", "--log-stdout", "--log-level", "debug") +
+ (("-p", pubd_name + ".prof") if args.profile else ()),
+ env = dict(os.environ, RPKI_CONF = pubd_name + ".conf"))
- # Start rpkid and irdbd instances
- for a in db.engines:
- a.run_daemons()
+ logger.info("Starting rsyncd")
+ rsyncd_process = subprocess.Popen((prog_rsyncd, "--daemon", "--no-detach", "--config", rsyncd_name + ".conf"))
- # From this point on we'll be running event-driven, so the rest of
- # the code until final exit is all closures.
+ # Start rpkid and irdbd instances
+ for a in db.engines:
+ a.run_daemons()
- def start():
- rpki.async.iterator(db.engines, create_rpki_objects, created_rpki_objects)
+ # From this point on we'll be running event-driven, so the rest of
+ # the code until final exit is all closures.
- def create_rpki_objects(iterator, a):
- a.create_rpki_objects(iterator)
+ def start():
+ rpki.async.iterator(db.engines, create_rpki_objects, create_pubd_objects)
- def created_rpki_objects():
+ def create_rpki_objects(iterator, a):
+ a.create_rpki_objects(iterator)
- # Set pubd's BPKI CRL
- set_pubd_crl(yaml_loop)
+ def create_pubd_objects():
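+ # Register rootd as a pubd <client/> of its own, cross-certified
+ # between the rootd and pubd BPKI TAs, so that rootd's products can
+ # be published under rootd_sia; once that is done we drop into the
+ # main YAML loop.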
+ call_pubd([rpki.publication_control.client_elt.make_pdu(action = "create",
+ client_handle = db.root.client_handle + "-" + rootd_name,
+ base_uri = rootd_sia,
+ bpki_cert = cross_certify(rootd_name + "-TA", pubd_name + "-TA"))],
+ cb = lambda ignored: yaml_loop())
- def yaml_loop():
+ def yaml_loop():
- # This is probably where we should be updating expired BPKI
- # objects, particular CRLs
+ # This is probably where we should be updating expired BPKI
+ # objects, particularly CRLs
- logger.info("Running cron for all RPKI engines")
- rpki.async.iterator(db.engines, run_cron, run_yaml)
+ logger.info("Running cron for all RPKI engines")
+ rpki.async.iterator(db.engines, run_cron, run_yaml)
- def run_cron(iterator, a):
- a.run_cron(iterator)
+ def run_cron(iterator, a):
+ a.run_cron(iterator)
- def run_yaml():
+ def run_yaml():
- # Run rcynic to check results
- run_rcynic()
+ # Run rcynic to check results
+ run_rcynic()
- # Apply next delta if we have one; otherwise, we're done.
- if yaml_script:
- logger.info("Applying deltas")
- db.apply_delta(yaml_script.pop(0), apply_delta_done)
- else:
- logger.info("No more deltas to apply, done")
- rpki.async.exit_event_loop()
+ # Apply next delta if we have one; otherwise, we're done.
+ if yaml_script:
+ logger.info("Applying deltas")
+ db.apply_delta(yaml_script.pop(0), apply_delta_done)
+ else:
+ logger.info("No more deltas to apply, done")
+ rpki.async.exit_event_loop()
- def apply_delta_done():
+ def apply_delta_done():
- # Resync IRDBs
- for a in db.engines:
- a.sync_sql()
+ # Resync IRDBs
+ for a in db.engines:
+ a.sync_sql()
- # Loop until we run out of control YAML
- yaml_loop()
+ # Loop until we run out of control YAML
+ yaml_loop()
- logger.info("Sleeping %d seconds while daemons start up", startup_delay)
- rpki.async.timer(start).set(rpki.sundial.timedelta(seconds = startup_delay))
- rpki.async.event_loop()
+ logger.info("Sleeping %d seconds while daemons start up", startup_delay)
+ rpki.async.timer(start).set(rpki.sundial.timedelta(seconds = startup_delay))
+ rpki.async.event_loop()
- # At this point we have gone into event-driven code.
+ # At this point we have gone into event-driven code.
- logger.info("Event loop exited normally")
+ logger.info("Event loop exited normally")
- except Exception, e:
- logger.exception("Event loop exited with an exception")
+ except Exception, e:
+ logger.exception("Event loop exited with an exception")
- finally:
- logger.info("Cleaning up")
- for a in db.engines:
- a.kill_daemons()
- for proc, name in ((rootd_process, "rootd"),
- (pubd_process, "pubd"),
- (rsyncd_process, "rsyncd")):
- # pylint: disable=E1103
- if proc is not None and proc.poll() is None:
- logger.info("Killing %s, pid %s", name, proc.pid)
- try:
- proc.terminate()
- except OSError:
- pass
- if proc is not None:
- logger.info("Daemon %s, pid %s exited with code %s", name, proc.pid, proc.wait())
+ finally:
+ logger.info("Cleaning up")
+ for a in db.engines:
+ a.kill_daemons()
+ for proc, name in ((rootd_process, "rootd"),
+ (pubd_process, "pubd"),
+ (rsyncd_process, "rsyncd")):
+ # pylint: disable=E1103
+ if proc is not None and proc.poll() is None:
+ logger.info("Killing %s, pid %s", name, proc.pid)
+ try:
+ proc.terminate()
+ except OSError:
+ pass
+ if proc is not None:
+ logger.info("Daemon %s, pid %s exited with code %s", name, proc.pid, proc.wait())
def cmd_sleep(cb, interval):
- """
- Set an alarm, then wait for it to go off.
- """
- howlong = rpki.sundial.timedelta.parse(interval)
- logger.info("Sleeping %r", howlong)
- rpki.async.timer(cb).set(howlong)
+ """
+ Set an alarm, then wait for it to go off.
+ """
+
+ howlong = rpki.sundial.timedelta.parse(interval)
+ logger.info("Sleeping %r", howlong)
+ rpki.async.timer(cb).set(howlong)
def cmd_shell(cb, *cmd):
- """
- Run a shell command.
- """
- cmd = " ".join(cmd)
- status = subprocess.call(cmd, shell = True)
- logger.info("Shell command returned status %d", status)
- cb()
+ """
+ Run a shell command.
+ """
+
+ cmd = " ".join(cmd)
+ status = subprocess.call(cmd, shell = True)
+ logger.info("Shell command returned status %d", status)
+ cb()
def cmd_echo(cb, *words):
- """
- Echo some text to the log.
- """
- logger.info(" ".join(words))
- cb()
+ """
+ Echo some text to the log.
+ """
+
+ logger.info(" ".join(words))
+ cb()
## @var cmds
# Dispatch table for commands embedded in delta sections
@@ -352,579 +359,954 @@ cmds = { "sleep" : cmd_sleep,
"echo" : cmd_echo }
class roa_request(object):
- """
- Representation for a roa_request object.
- """
-
- def __init__(self, asn, ipv4, ipv6):
- self.asn = asn
- self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None
- self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None
-
- def __eq__(self, other):
- return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6
-
- def __hash__(self):
- v4 = tuple(self.v4) if self.v4 is not None else None
- v6 = tuple(self.v6) if self.v6 is not None else None
- return self.asn.__hash__() + v4.__hash__() + v6.__hash__()
-
- def __str__(self):
- if self.v4 and self.v6: s = str(self.v4) + "," + str(self.v6)
- elif self.v4: s = str(self.v4)
- else: s = str(self.v6)
- return "%s: %s" % (self.asn, s)
-
- @classmethod
- def parse(cls, yaml):
- return cls(yaml.get("asn"), yaml.get("ipv4"), yaml.get("ipv6"))
-
-class router_cert(object):
- """
- Representation for a router_cert object.
- """
-
- _ecparams = None
- _keypair = None
- _pkcs10 = None
- _gski = None
-
- @classmethod
- def ecparams(cls):
- if cls._ecparams is None:
- cls._ecparams = rpki.x509.KeyParams.generateEC()
- return cls._ecparams
-
- def __init__(self, asn, router_id):
- self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split()))
- self.router_id = router_id
- self.cn = "ROUTER-%08x" % self.asn[0].min
- self.sn = "%08x" % self.router_id
- self.eku = rpki.oids.id_kp_bgpsec_router
-
- @property
- def keypair(self):
- if self._keypair is None:
- self._keypair = rpki.x509.ECDSA.generate(self.ecparams())
- return self._keypair
-
- @property
- def pkcs10(self):
- if self._pkcs10 is None:
- self._pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair)
- return self._pkcs10
-
- @property
- def gski(self):
- if self._gski is None:
- self._gski = self.pkcs10.gSKI()
- return self._gski
-
- def __eq__(self, other):
- return self.asn == other.asn and self.sn == other.sn
-
- def __hash__(self):
- return tuple(self.asn).__hash__() + self.cn.__hash__() + self.sn.__hash__()
-
- def __str__(self):
- return "%s: %s,%s: %s" % (self.asn, self.cn, self.sn, self.gski)
-
- @classmethod
- def parse(cls, yaml):
- return cls(yaml.get("asn"), yaml.get("router_id"))
-
-class allocation_db(list):
- """
- Representation of all the entities and allocations in the test
- system. Almost everything is generated out of this database.
- """
-
- def __init__(self, yaml):
"""
- Initialize database from the (first) YAML document.
+ Representation for a roa_request object.
"""
- list.__init__(self)
- self.root = allocation(yaml, self)
- assert self.root.is_root
- if self.root.crl_interval is None:
- self.root.crl_interval = rpki.sundial.timedelta.parse(cfg.get("crl_interval", "1d")).convert_to_seconds()
- if self.root.regen_margin is None:
- self.root.regen_margin = rpki.sundial.timedelta.parse(cfg.get("regen_margin", "1d")).convert_to_seconds()
- for a in self:
- if a.sia_base is None:
- a.sia_base = (rootd_sia + "root/trunk/" if a.is_root else a.parent.sia_base) + a.name + "/"
- if a.base.valid_until is None:
- a.base.valid_until = a.parent.base.valid_until
- if a.crl_interval is None:
- a.crl_interval = a.parent.crl_interval
- if a.regen_margin is None:
- a.regen_margin = a.parent.regen_margin
- a.client_handle = "/".join(a.sia_base.split("/")[4:]).rstrip("/")
- self.root.closure()
- self.map = dict((a.name, a) for a in self)
- self.engines = [a for a in self if a.is_engine]
- for i, a in enumerate(self.engines):
- a.set_engine_number(i)
- for a in self:
- if a.is_hosted:
- a.hosted_by = self.map[a.hosted_by]
- a.hosted_by.hosts.append(a)
- assert a.is_twig, "%s is not twig" % a.name
- assert not a.hosted_by.is_hosted, "%s is hosted by a hosted entity" % a.name
-
- def apply_delta(self, delta, cb):
- """
- Apply a delta or run a command.
- """
-
- def loop(iterator, d):
- if isinstance(d, str):
- c = d.split()
- cmds[c[0]](iterator, *c[1:])
- else:
- self.map[d["name"]].apply_delta(d, iterator)
+ def __init__(self, asn, ipv4, ipv6):
+ self.asn = asn
+ self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None
+ self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None
- def done():
- self.root.closure()
- cb()
+ def __eq__(self, other):
+ return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6
- if delta is None:
- cb()
- else:
- rpki.async.iterator(delta, loop, done)
+ def __hash__(self):
+ v4 = tuple(self.v4) if self.v4 is not None else None
+ v6 = tuple(self.v6) if self.v6 is not None else None
+ return self.asn.__hash__() + v4.__hash__() + v6.__hash__()
- def dump(self):
- """
- Print content of the database.
- """
- for a in self:
- print a
+ def __str__(self):
+ if self.v4 and self.v6: s = str(self.v4) + "," + str(self.v6)
+ elif self.v4: s = str(self.v4)
+ else: s = str(self.v6)
+ return "%s: %s" % (self.asn, s)
-class allocation(object):
+ @classmethod
+ def parse(cls, yaml):
+ return cls(yaml.get("asn"), yaml.get("ipv4"), yaml.get("ipv6"))
- parent = None
- irdb_db_name = None
- irdb_port = None
- rpki_db_name = None
- rpki_port = None
- crl_interval = None
- regen_margin = None
- last_cms_time = None
- rpkid_process = None
- irdbd_process = None
-
- def __init__(self, yaml, db, parent = None):
- """
- Initialize one entity and insert it into the database.
- """
- db.append(self)
- self.name = yaml["name"]
- self.parent = parent
- self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())]
- valid_until = None
- if "valid_until" in yaml:
- valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until"))
- if valid_until is None and "valid_for" in yaml:
- valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"])
- self.base = rpki.resource_set.resource_bag(
- asn = rpki.resource_set.resource_set_as(yaml.get("asn")),
- v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")),
- v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")),
- valid_until = valid_until)
- self.sia_base = yaml.get("sia_base")
- if "crl_interval" in yaml:
- self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds()
- if "regen_margin" in yaml:
- self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds()
- self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))]
- for r in self.roa_requests:
- if r.v4:
- self.base.v4 |= r.v4.to_resource_set()
- if r.v6:
- self.base.v6 |= r.v6.to_resource_set()
- self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())]
- for r in self.router_certs:
- self.base.asn |= r.asn
- self.hosted_by = yaml.get("hosted_by")
- self.extra_conf = yaml.get("extra_conf", [])
- self.hosts = []
-
- def closure(self):
+class router_cert(object):
"""
- Compute the transitive resource closure.
+ Representation for a router_cert object.
"""
- resources = self.base
- for kid in self.kids:
- resources |= kid.closure()
- self.resources = resources
- return resources
- def apply_delta(self, yaml, cb):
+ _ecparams = None
+ _keypair = None
+ _pkcs10 = None
+ _gski = None
+
+ @classmethod
+ def ecparams(cls):
+ if cls._ecparams is None:
+ cls._ecparams = rpki.x509.KeyParams.generateEC()
+ return cls._ecparams
+
+ def __init__(self, asn, router_id):
+ self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split()))
+ self.router_id = router_id
+ self.cn = "ROUTER-%08x" % self.asn[0].min
+ self.sn = "%08x" % self.router_id
+ self.eku = rpki.oids.id_kp_bgpsec_router
+
+ @property
+ def keypair(self):
+ if self._keypair is None:
+ self._keypair = rpki.x509.ECDSA.generate(self.ecparams())
+ return self._keypair
+
+ @property
+ def pkcs10(self):
+ if self._pkcs10 is None:
+ self._pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair)
+ return self._pkcs10
+
+ @property
+ def gski(self):
+ if self._gski is None:
+ self._gski = self.pkcs10.gSKI()
+ return self._gski
+
+ def __eq__(self, other):
+ return self.asn == other.asn and self.sn == other.sn
+
+ def __hash__(self):
+ return tuple(self.asn).__hash__() + self.cn.__hash__() + self.sn.__hash__()
+
+ def __str__(self):
+ return "%s: %s,%s: %s" % (self.asn, self.cn, self.sn, self.gski)
+
+ @classmethod
+ def parse(cls, yaml):
+ return cls(yaml.get("asn"), yaml.get("router_id"))
+
+class allocation_db(list):
"""
- Apply deltas to this entity.
+ Representation of all the entities and allocations in the test
+ system. Almost everything is generated out of this database.
"""
- logger.info("Applying delta: %s", yaml)
-
- def loop(iterator, kv):
- if kv[0] == "name":
- iterator()
- else:
- getattr(self, "apply_" + kv[0])(kv[1], iterator)
-
- rpki.async.iterator(yaml.items(), loop, cb)
-
- def apply_add_as(self, text, cb):
- self.base.asn |= rpki.resource_set.resource_set_as(text)
- cb()
-
- def apply_add_v4(self, text, cb):
- self.base.v4 |= rpki.resource_set.resource_set_ipv4(text)
- cb()
+ def __init__(self, yaml):
+ """
+ Initialize database from the (first) YAML document.
+ """
+
+ list.__init__(self)
+ self.root = allocation(yaml, self)
+ assert self.root.is_root
+ if self.root.crl_interval is None:
+ self.root.crl_interval = rpki.sundial.timedelta.parse(cfg.get("crl_interval", "1d")).convert_to_seconds()
+ if self.root.regen_margin is None:
+ self.root.regen_margin = rpki.sundial.timedelta.parse(cfg.get("regen_margin", "1d")).convert_to_seconds()
+ for a in self:
+ if a.sia_base is None:
+ a.sia_base = (rootd_sia + "root/trunk/" if a.is_root else a.parent.sia_base) + a.name + "/"
+ if a.base.valid_until is None:
+ a.base.valid_until = a.parent.base.valid_until
+ if a.crl_interval is None:
+ a.crl_interval = a.parent.crl_interval
+ if a.regen_margin is None:
+ a.regen_margin = a.parent.regen_margin
+ a.client_handle = "/".join(a.sia_base.split("/")[4:]).rstrip("/")
+ self.root.closure()
+ self.map = dict((a.name, a) for a in self)
+ self.engines = [a for a in self if a.is_engine]
+ for i, a in enumerate(self.engines):
+ a.set_engine_number(i)
+ for a in self:
+ if a.is_hosted:
+ a.hosted_by = self.map[a.hosted_by]
+ a.hosted_by.hosts.append(a)
+ assert a.is_twig, "%s is not twig" % a.name
+ assert not a.hosted_by.is_hosted, "%s is hosted by a hosted entity" % a.name
+
+ def apply_delta(self, delta, cb):
+ """
+ Apply a delta or run a command.
+ """
+
+ def loop(iterator, d):
+ if isinstance(d, str):
+ c = d.split()
+ cmds[c[0]](iterator, *c[1:])
+ else:
+ self.map[d["name"]].apply_delta(d, iterator)
+
+ def done():
+ self.root.closure()
+ cb()
+
+ if delta is None:
+ cb()
+ else:
+ rpki.async.iterator(delta, loop, done)
- def apply_add_v6(self, text, cb):
- self.base.v6 |= rpki.resource_set.resource_set_ipv6(text)
- cb()
+ def dump(self):
+ """
+ Print content of the database.
+ """
- def apply_sub_as(self, text, cb):
- self.base.asn |= rpki.resource_set.resource_set_as(text)
- cb()
+ for a in self:
+ print a
- def apply_sub_v4(self, text, cb):
- self.base.v4 |= rpki.resource_set.resource_set_ipv4(text)
- cb()
+class allocation(object):
- def apply_sub_v6(self, text, cb):
- self.base.v6 |= rpki.resource_set.resource_set_ipv6(text)
- cb()
+ parent = None
+ irdb_db_name = None
+ irdb_port = None
+ rpki_db_name = None
+ rpki_port = None
+ crl_interval = None
+ regen_margin = None
+ last_cms_time = None
+ rpkid_process = None
+ irdbd_process = None
+
+ def __init__(self, yaml, db, parent = None):
+ """
+ Initialize one entity and insert it into the database.
+ """
+
+ db.append(self)
+ self.name = yaml["name"]
+ self.parent = parent
+ self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())]
+ valid_until = None
+ if "valid_until" in yaml:
+ valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until"))
+ if valid_until is None and "valid_for" in yaml:
+ valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"])
+ self.base = rpki.resource_set.resource_bag(
+ asn = rpki.resource_set.resource_set_as(yaml.get("asn")),
+ v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")),
+ v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")),
+ valid_until = valid_until)
+ self.sia_base = yaml.get("sia_base")
+ if "crl_interval" in yaml:
+ self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds()
+ if "regen_margin" in yaml:
+ self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds()
+ self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))]
+ for r in self.roa_requests:
+ if r.v4:
+ self.base.v4 |= r.v4.to_resource_set()
+ if r.v6:
+ self.base.v6 |= r.v6.to_resource_set()
+ self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())]
+ for r in self.router_certs:
+ self.base.asn |= r.asn
+ self.hosted_by = yaml.get("hosted_by")
+ self.extra_conf = yaml.get("extra_conf", [])
+ self.hosts = []
+
+ def closure(self):
+ """
+ Compute the transitive resource closure.
+ """
+
+ resources = self.base
+ for kid in self.kids:
+ resources |= kid.closure()
+ self.resources = resources
+ return resources
+
+ def apply_delta(self, yaml, cb):
+ """
+ Apply deltas to this entity.
+ """
+
+ logger.info("Applying delta: %s", yaml)
+
+ def loop(iterator, kv):
+ if kv[0] == "name":
+ iterator()
+ else:
+ getattr(self, "apply_" + kv[0])(kv[1], iterator)
+
+ rpki.async.iterator(yaml.items(), loop, cb)
+
+ def apply_add_as(self, text, cb):
+ self.base.asn |= rpki.resource_set.resource_set_as(text)
+ cb()
+
+ def apply_add_v4(self, text, cb):
+ self.base.v4 |= rpki.resource_set.resource_set_ipv4(text)
+ cb()
+
+ def apply_add_v6(self, text, cb):
+ self.base.v6 |= rpki.resource_set.resource_set_ipv6(text)
+ cb()
+
+ def apply_sub_as(self, text, cb):
+ self.base.asn |= rpki.resource_set.resource_set_as(text)
+ cb()
+
+ def apply_sub_v4(self, text, cb):
+ self.base.v4 |= rpki.resource_set.resource_set_ipv4(text)
+ cb()
+
+ def apply_sub_v6(self, text, cb):
+ self.base.v6 |= rpki.resource_set.resource_set_ipv6(text)
+ cb()
+
+ def apply_valid_until(self, stamp, cb):
+ self.base.valid_until = rpki.sundial.datetime.from_datetime(stamp)
+ cb()
+
+ def apply_valid_for(self, text, cb):
+ self.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(text)
+ cb()
+
+ def apply_valid_add(self, text, cb):
+ self.base.valid_until += rpki.sundial.timedelta.parse(text)
+ cb()
+
+ def apply_valid_sub(self, text, cb):
+ self.base.valid_until -= rpki.sundial.timedelta.parse(text)
+ cb()
+
+ def apply_roa_request_add(self, yaml, cb):
+ for y in yaml:
+ r = roa_request.parse(y)
+ if r not in self.roa_requests:
+ self.roa_requests.append(r)
+ cb()
+
+ def apply_roa_request_del(self, yaml, cb):
+ for y in yaml:
+ r = roa_request.parse(y)
+ if r in self.roa_requests:
+ self.roa_requests.remove(r)
+ cb()
+
+ def apply_router_cert_add(self, yaml, cb):
+ for y in yaml:
+ r = router_cert.parse(y)
+ if r not in self.router_certs:
+ self.router_certs.append(r)
+ cb()
+
+ def apply_router_cert_del(self, yaml, cb):
+ for y in yaml:
+ r = router_cert.parse(y)
+ if r in self.router_certs:
+ self.router_certs.remove(r)
+ cb()
+
+ def apply_rekey(self, target, cb):
+
+ def done(e):
+ if isinstance(e, Exception):
+ logger.exception("Exception while rekeying %s", self.name)
+ raise e
+ cb()
+
+ if target is None:
+ logger.info("Rekeying <tenant/> %s", self.name)
+ self.call_rpkid([rpki.left_right.self_elt.make_pdu(
+ action = "set", self_handle = self.name, rekey = "yes")], cb = done)
+ else:
+ logger.info("Rekeying <parent/> %s %s", self.name, target)
+ self.call_rpkid([rpki.left_right.parent_elt.make_pdu(
+ action = "set", self_handle = self.name, parent_handle = target, rekey = "yes")], cb = done)
+
+ def apply_revoke(self, target, cb):
+
+ def done(e):
+ if isinstance(e, Exception):
+ logger.exception("Exception while revoking %s", self.name)
+ raise e
+ cb()
+
+ if target is None:
+ logger.info("Revoking <tenant/> %s", self.name)
+ self.call_rpkid([rpki.left_right.self_elt.make_pdu(
+ action = "set", self_handle = self.name, revoke = "yes")], cb = done)
+ else:
+ logger.info("Revoking <parent/> %s %s", self.name, target)
+ self.call_rpkid([rpki.left_right.parent_elt.make_pdu(
+ action = "set", self_handle = self.name, parent_handle = target, revoke = "yes")], cb = done)
+
+ def __str__(self):
+ s = self.name + "\n"
+ if self.resources.asn: s += " ASN: %s\n" % self.resources.asn
+ if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4
+ if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6
+ if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids)
+ if self.parent: s += " Up: %s\n" % self.parent.name
+ if self.sia_base: s += " SIA: %s\n" % self.sia_base
+ return s + "Until: %s\n" % self.resources.valid_until
+
+
+ @property
+ def is_root(self):
+ return self.parent is None
+
+ @property
+ def is_twig(self):
+ return not self.is_root
+
+ @property
+ def is_hosted(self):
+ return self.hosted_by is not None
+
+ @property
+ def is_engine(self):
+ return not self.is_hosted
+
+ def set_engine_number(self, n):
+ """
+ Set the engine number for this entity.
+ """
+
+ self.irdb_db_name = "irdb%d" % n
+ self.irdb_port = allocate_port()
+ self.rpki_db_name = "rpki%d" % n
+ self.rpki_port = allocate_port()
+
+ def get_rpki_port(self):
+ """
+ Get rpki port to use for this entity.
+ """
+
+ if self.is_hosted:
+ assert self.hosted_by.rpki_port is not None
+ return self.hosted_by.rpki_port
+ else:
+ assert self.rpki_port is not None
+ return self.rpki_port
+
+ def setup_bpki_certs(self):
+ """
+ Create BPKI certificates for this entity.
+ """
+
+ logger.info("Constructing BPKI keys and certs for %s", self.name)
+ setup_bpki_cert_chain(name = self.name,
+ ee = ("RPKI", "IRDB", "IRBE"),
+ ca = ("SELF",))
+ self.rpkid_ta = rpki.x509.X509(PEM_file = self.name + "-TA.cer")
+ self.irbe_key = rpki.x509.RSA( PEM_file = self.name + "-IRBE.key")
+ self.irbe_cert = rpki.x509.X509(PEM_file = self.name + "-IRBE.cer")
+ self.rpkid_cert = rpki.x509.X509(PEM_file = self.name + "-RPKI.cer")
+
+ def setup_conf_file(self):
+ """
+ Write config files for this entity.
+ """
+
+ logger.info("Writing config files for %s", self.name)
+ assert self.rpki_port is not None
+ d = dict(my_name = self.name,
+ irdb_db_name = self.irdb_db_name,
+ irdb_db_pass = irdb_db_pass,
+ irdb_port = self.irdb_port,
+ rpki_db_name = self.rpki_db_name,
+ rpki_db_pass = rpki_db_pass,
+ rpki_port = self.rpki_port)
+ f = open(self.name + ".conf", "w")
+ f.write(conf_fmt_1 % d)
+ for line in self.extra_conf:
+ f.write(line + "\n")
+ f.close()
+
+ def setup_sql(self, rpki_sql, irdb_sql):
+ """
+ Set up this entity's IRDB.
+ """
+
+ logger.info("Setting up MySQL for %s", self.name)
+ db = MySQLdb.connect(user = "rpki", db = self.rpki_db_name, passwd = rpki_db_pass,
+ conv = sql_conversions)
+ cur = db.cursor()
+ db.autocommit(True)
+ for sql in rpki_sql:
+ try:
+ cur.execute(sql)
+ except:
+ if "DROP TABLE IF EXISTS" not in sql.upper():
+ raise
+ db.close()
+ db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass,
+ conv = sql_conversions)
+ cur = db.cursor()
+ db.autocommit(True)
+ for sql in irdb_sql:
+ try:
+ cur.execute(sql)
+ except:
+ if "DROP TABLE IF EXISTS" not in sql.upper():
+ raise
+ for s in [self] + self.hosts:
+ for kid in s.kids:
+ cur.execute("INSERT registrant (registrant_handle, registry_handle, valid_until) VALUES (%s, %s, %s)",
+ (kid.name, s.name, kid.resources.valid_until))
+ db.close()
+
+ def sync_sql(self):
+ """
+ Whack this entity's IRDB to match our master database. We do this
+ once during setup, then do it again every time we apply a delta to
+ this entity.
+ """
+
+ logger.info("Updating MySQL data for IRDB %s", self.name)
+ db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass,
+ conv = sql_conversions)
+ cur = db.cursor()
+ db.autocommit(True)
+ cur.execute("DELETE FROM registrant_asn")
+ cur.execute("DELETE FROM registrant_net")
+ cur.execute("DELETE FROM roa_request_prefix")
+ cur.execute("DELETE FROM roa_request")
+ cur.execute("DELETE FROM ee_certificate_asn")
+ cur.execute("DELETE FROM ee_certificate_net")
+ cur.execute("DELETE FROM ee_certificate")
+
+ for s in [self] + self.hosts:
+ for kid in s.kids:
+ cur.execute("SELECT registrant_id FROM registrant WHERE registrant_handle = %s AND registry_handle = %s",
+ (kid.name, s.name))
+ registrant_id = cur.fetchone()[0]
+ for as_range in kid.resources.asn:
+ cur.execute("INSERT registrant_asn (start_as, end_as, registrant_id) VALUES (%s, %s, %s)",
+ (as_range.min, as_range.max, registrant_id))
+ for v4_range in kid.resources.v4:
+ cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 4, %s)",
+ (v4_range.min, v4_range.max, registrant_id))
+ for v6_range in kid.resources.v6:
+ cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 6, %s)",
+ (v6_range.min, v6_range.max, registrant_id))
+ cur.execute("UPDATE registrant SET valid_until = %s WHERE registrant_id = %s",
+ (kid.resources.valid_until, registrant_id))
+ for r in s.roa_requests:
+ cur.execute("INSERT roa_request (self_handle, asn) VALUES (%s, %s)",
+ (s.name, r.asn))
+ roa_request_id = cur.lastrowid
+ for version, prefix_set in ((4, r.v4), (6, r.v6)):
+ if prefix_set:
+ cur.executemany("INSERT roa_request_prefix "
+ "(roa_request_id, prefix, prefixlen, max_prefixlen, version) "
+ "VALUES (%s, %s, %s, %s, %s)",
+ ((roa_request_id, x.prefix, x.prefixlen, x.max_prefixlen, version)
+ for x in prefix_set))
+ for r in s.router_certs:
+ cur.execute("INSERT ee_certificate (self_handle, pkcs10, gski, cn, sn, eku, valid_until) "
+ "VALUES (%s, %s, %s, %s, %s, %s, %s)",
+ (s.name, r.pkcs10.get_DER(), r.gski, r.cn, r.sn, r.eku, s.resources.valid_until))
+ ee_certificate_id = cur.lastrowid
+ cur.executemany("INSERT ee_certificate_asn (ee_certificate_id, start_as, end_as) VALUES (%s, %s, %s)",
+ ((ee_certificate_id, a.min, a.max) for a in r.asn))
+ db.close()
+
+ def run_daemons(self):
+ """
+ Run daemons for this entity.
+ """
+
+ logger.info("Running daemons for %s", self.name)
+ env = dict(os.environ, RPKI_CONF = self.name + ".conf")
+ self.rpkid_process = subprocess.Popen((prog_python, prog_rpkid, "--foreground", "--log-stdout", "--log-level", "debug") +
+ (("--profile", self.name + ".prof") if args.profile else ()),
+ env = env)
+ self.irdbd_process = subprocess.Popen((prog_python, prog_irdbd, "--foreground", "--log-stdout", "--log-level", "debug"),
+ env = env)
+
+ def kill_daemons(self):
+ """
+ Kill daemons for this entity.
+ """
+
+ # pylint: disable=E1103
+ for proc, name in ((self.rpkid_process, "rpkid"),
+ (self.irdbd_process, "irdbd")):
+ if proc is not None and proc.poll() is None:
+ logger.info("Killing daemon %s pid %s for %s", name, proc.pid, self.name)
+ try:
+ proc.terminate()
+ except OSError:
+ pass
+ if proc is not None:
+ logger.info("Daemon %s pid %s for %s exited with code %s",
+ name, proc.pid, self.name, proc.wait())
+
+ def call_rpkid(self, pdus, cb):
+ """
+ Send a left-right message to this entity's RPKI daemon and return
+ the response.
+
+ If this entity is hosted (does not run its own RPKI daemon), all
+ of this happens with the hosting RPKI daemon.
+ """
+
+ logger.info("Calling rpkid for %s", self.name)
+
+ if self.is_hosted:
+ logger.info("rpkid %s is hosted by rpkid %s, switching", self.name, self.hosted_by.name)
+ self = self.hosted_by
+ assert not self.is_hosted
+
+ assert isinstance(pdus, (list, tuple))
+ assert self.rpki_port is not None
+
+ q_msg = rpki.left_right.msg.query(*pdus)
+ q_cms = rpki.left_right.cms_msg_saxify()
+ q_der = q_cms.wrap(q_msg, self.irbe_key, self.irbe_cert)
+ q_url = "http://localhost:%d/left-right" % self.rpki_port
+
+ logger.debug(q_cms.pretty_print_content())
+
+ def done(r_der):
+ logger.info("Callback from rpkid %s", self.name)
+ r_cms = rpki.left_right.cms_msg_saxify(DER = r_der)
+ r_msg = r_cms.unwrap((self.rpkid_ta, self.rpkid_cert))
+ self.last_cms_time = r_cms.check_replay(self.last_cms_time, q_url)
+ logger.debug(r_cms.pretty_print_content())
+ assert r_msg.is_reply
+ for r_pdu in r_msg:
+ assert not isinstance(r_pdu, rpki.left_right.report_error_elt)
+ cb(r_msg)
+
+ def lose(e):
+ raise
+
+ rpki.http.client(
+ url = q_url,
+ msg = q_der,
+ callback = done,
+ errback = lose)
+
+ def cross_certify(self, certificant, reverse = False):
+ """
+ Cross-certify and return the resulting certificate.
+ """
+
+ if reverse:
+ certifier = certificant
+ certificant = self.name + "-SELF"
+ else:
+ certifier = self.name + "-SELF"
+ return cross_certify(certificant, certifier)
+
+ def create_rpki_objects(self, cb):
+ """
+ Create RPKI engine objects for this engine.
+
+ Root node of the engine tree is special: it too has a parent, but
+ that one is the magic self-signed micro engine.
+
+ The rest of this is straightforward. There are a lot of objects
+ to create, but we can batch them all into one honking PDU, then
+ issue one more PDU to set BSC EE certificates based on the PKCS
+ #10 requests we get back when we tell rpkid to generate BSC keys.
+ """
+
+ assert not self.is_hosted
+
+ selves = [self] + self.hosts
+
+ rpkid_pdus = []
+ pubd_pdus = []
+
+ for i, s in enumerate(selves):
+ logger.info("Creating RPKI objects for [%d] %s", i, s.name)
+
+ rpkid_pdus.append(rpki.left_right.self_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ crl_interval = s.crl_interval,
+ regen_margin = s.regen_margin,
+ bpki_cert = (s.cross_certify(s.hosted_by.name + "-TA", reverse = True)
+ if s.is_hosted else
+ rpki.x509.X509(Auto_file = s.name + "-SELF.cer"))))
+
+ rpkid_pdus.append(rpki.left_right.bsc_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ bsc_handle = "b",
+ generate_keypair = True))
+
+ pubd_pdus.append(rpki.publication_control.client_elt.make_pdu(
+ action = "create",
+ client_handle = s.client_handle,
+ base_uri = s.sia_base,
+ bpki_cert = s.cross_certify(pubd_name + "-TA", reverse = True)))
+
+ rpkid_pdus.append(rpki.left_right.repository_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ bsc_handle = "b",
+ repository_handle = "r",
+ bpki_cert = s.cross_certify(pubd_name + "-TA"),
+ peer_contact_uri = "http://localhost:%d/client/%s" % (pubd_port, s.client_handle)))
+
+ for k in s.kids:
+ rpkid_pdus.append(rpki.left_right.child_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ child_handle = k.name,
+ bsc_handle = "b",
+ bpki_cert = s.cross_certify(k.name + "-SELF")))
+
+ if s.is_root:
+ rootd_cert = s.cross_certify(rootd_name + "-TA")
+ rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ parent_handle = "rootd",
+ bsc_handle = "b",
+ repository_handle = "r",
+ sia_base = s.sia_base,
+ bpki_cert = rootd_cert,
+ sender_name = s.name,
+ recipient_name = "rootd",
+ peer_contact_uri = "http://localhost:%s/" % rootd_port))
+ else:
+ rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu(
+ action = "create",
+ self_handle = s.name,
+ parent_handle = s.parent.name,
+ bsc_handle = "b",
+ repository_handle = "r",
+ sia_base = s.sia_base,
+ bpki_cert = s.cross_certify(s.parent.name + "-SELF"),
+ sender_name = s.name,
+ recipient_name = s.parent.name,
+ peer_contact_uri = "http://localhost:%s/up-down/%s/%s" % (s.parent.get_rpki_port(),
+ s.parent.name, s.name)))
+
+ def one():
+ call_pubd(pubd_pdus, cb = two)
+
+ def two(vals):
+ self.call_rpkid(rpkid_pdus, cb = three)
+
+ def three(vals):
+
+ bsc_dict = dict((b.self_handle, b) for b in vals if isinstance(b, rpki.left_right.bsc_elt))
+
+ bsc_pdus = []
+
+ for s in selves:
+ b = bsc_dict[s.name]
+
+ logger.info("Issuing BSC EE cert for %s", s.name)
+ cmd = (prog_openssl, "x509", "-req", "-sha256", "-extfile", s.name + "-RPKI.conf",
+ "-extensions", "req_x509_ext", "-days", "30",
+ "-CA", s.name + "-SELF.cer", "-CAkey", s.name + "-SELF.key", "-CAcreateserial", "-text")
+ signer = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
+ signed = signer.communicate(input = b.pkcs10_request.get_PEM())
+ if not signed[0]:
+ logger.warning(signed[1])
+ raise CouldntIssueBSCEECertificate("Couldn't issue BSC EE certificate")
+ s.bsc_ee = rpki.x509.X509(PEM = signed[0])
+ s.bsc_crl = rpki.x509.CRL(PEM_file = s.name + "-SELF.crl")
+ logger.info("BSC EE cert for %s SKI %s", s.name, s.bsc_ee.hSKI())
+
+ bsc_pdus.append(rpki.left_right.bsc_elt.make_pdu(
+ action = "set",
+ self_handle = s.name,
+ bsc_handle = "b",
+ signing_cert = s.bsc_ee,
+ signing_cert_crl = s.bsc_crl))
+
+ self.call_rpkid(bsc_pdus, cb = four)
+
+ def four(vals):
+ cb()
+
+ one()
+
+ def setup_yaml_leaf(self):
+ """
+ Generate certificates and write YAML scripts for leaf nodes.
+
+ We're cheating a bit here: properly speaking, we can't generate
+ issue or revoke requests without knowing the class, which is
+ generated on the fly, but at the moment the test case is
+ simplistic enough that the class will always be "1", so we just
+ wire in that value for now.
+
+ Well, ok, we just broke that assumption. Now we do something even
+ nastier, just to eke a bit more life out of this kludge. This
+ really needs to be rewritten, but it may require a different tool
+ than testpoke.
+ """
+
+ if not os.path.exists(self.name + ".key"):
+ logger.info("Generating RPKI key for %s", self.name)
+ subprocess.check_call((prog_openssl, "genrsa", "-out", self.name + ".key", "2048" ),
+ stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
+ ski = rpki.x509.RSA(PEM_file = self.name + ".key").gSKI()
+
+ if self.parent.is_hosted:
+ parent_host = self.parent.hosted_by.name
+ else:
+ parent_host = self.parent.name
- def apply_valid_until(self, stamp, cb):
- self.base.valid_until = rpki.sundial.datetime.from_datetime(stamp)
- cb()
+ self.cross_certify(self.parent.name + "-SELF")
+ self.cross_certify(parent_host + "-TA")
- def apply_valid_for(self, text, cb):
- self.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(text)
- cb()
+ def run_cron(self, cb):
+ """
+ Trigger cron run for this engine.
+ """
- def apply_valid_add(self, text, cb):
- self.base.valid_until += rpki.sundial.timedelta.parse(text)
- cb()
+ logger.info("Running cron for %s", self.name)
- def apply_valid_sub(self, text, cb):
- self.base.valid_until -= rpki.sundial.timedelta.parse(text)
- cb()
+ assert self.rpki_port is not None
- def apply_roa_request_add(self, yaml, cb):
- for y in yaml:
- r = roa_request.parse(y)
- if r not in self.roa_requests:
- self.roa_requests.append(r)
- cb()
+ def done(result):
+ assert result == "OK", 'Expected "OK" result from cronjob, got %r' % result
+ cb()
- def apply_roa_request_del(self, yaml, cb):
- for y in yaml:
- r = roa_request.parse(y)
- if r in self.roa_requests:
- self.roa_requests.remove(r)
- cb()
+ rpki.http.client(
+ url = "http://localhost:%d/cronjob" % self.rpki_port,
+ msg = "Run cron now, please",
+ callback = done,
+ errback = done)
- def apply_router_cert_add(self, yaml, cb):
- for y in yaml:
- r = router_cert.parse(y)
- if r not in self.router_certs:
- self.router_certs.append(r)
- cb()
+ def run_yaml(self):
+ """
+ Run YAML scripts for this leaf entity. Since we're not bothering
+ to check the class list returned by the list command, the issue
+ command may fail, so we treat failure of the list command as an
+ error, but only issue a warning when issue fails.
+ """
- def apply_router_cert_del(self, yaml, cb):
- for y in yaml:
- r = router_cert.parse(y)
- if r in self.router_certs:
- self.router_certs.remove(r)
- cb()
+ logger.info("Running YAML for %s", self.name)
+ subprocess.check_call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "list"))
+ if subprocess.call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "issue")) != 0:
+ logger.warning("YAML issue command failed for %s, continuing", self.name)
- def apply_rekey(self, target, cb):
-
- def done(e):
- if isinstance(e, Exception):
- logger.exception("Exception while rekeying %s", self.name)
- raise e
- cb()
-
- if target is None:
- logger.info("Rekeying <self/> %s", self.name)
- self.call_rpkid([rpki.left_right.self_elt.make_pdu(
- action = "set", self_handle = self.name, rekey = "yes")], cb = done)
- else:
- logger.info("Rekeying <parent/> %s %s", self.name, target)
- self.call_rpkid([rpki.left_right.parent_elt.make_pdu(
- action = "set", self_handle = self.name, parent_handle = target, rekey = "yes")], cb = done)
-
- def apply_revoke(self, target, cb):
-
- def done(e):
- if isinstance(e, Exception):
- logger.exception("Exception while revoking %s", self.name)
- raise e
- cb()
-
- if target is None:
- logger.info("Revoking <self/> %s", self.name)
- self.call_rpkid([rpki.left_right.self_elt.make_pdu(
- action = "set", self_handle = self.name, revoke = "yes")], cb = done)
- else:
- logger.info("Revoking <parent/> %s %s", self.name, target)
- self.call_rpkid([rpki.left_right.parent_elt.make_pdu(
- action = "set", self_handle = self.name, parent_handle = target, revoke = "yes")], cb = done)
-
- def __str__(self):
- s = self.name + "\n"
- if self.resources.asn: s += " ASN: %s\n" % self.resources.asn
- if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4
- if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6
- if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids)
- if self.parent: s += " Up: %s\n" % self.parent.name
- if self.sia_base: s += " SIA: %s\n" % self.sia_base
- return s + "Until: %s\n" % self.resources.valid_until
-
-
- @property
- def is_root(self):
- return self.parent is None
-
- @property
- def is_twig(self):
- return not self.is_root
-
- @property
- def is_hosted(self):
- return self.hosted_by is not None
-
- @property
- def is_engine(self):
- return not self.is_hosted
-
- def set_engine_number(self, n):
+def setup_bpki_cert_chain(name, ee = (), ca = ()):
"""
- Set the engine number for this entity.
+ Build a set of BPKI certificates.
"""
- self.irdb_db_name = "irdb%d" % n
- self.irdb_port = allocate_port()
- self.rpki_db_name = "rpki%d" % n
- self.rpki_port = allocate_port()
- def get_rpki_port(self):
- """
- Get rpki port to use for this entity.
+ s = "exec >/dev/null 2>&1\n"
+ #s = "set -x\n"
+ for kind in ("TA",) + ee + ca:
+ d = dict(name = name,
+ kind = kind,
+ ca = "false" if kind in ee else "true",
+ openssl = prog_openssl)
+ f = open("%(name)s-%(kind)s.conf" % d, "w")
+ f.write(bpki_cert_fmt_1 % d)
+ f.close()
+ if not os.path.exists("%(name)s-%(kind)s.key" % d):
+ s += bpki_cert_fmt_2 % d
+ s += bpki_cert_fmt_3 % d
+ d = dict(name = name,
+ openssl = prog_openssl)
+ s += bpki_cert_fmt_4 % d
+ for kind in ee + ca:
+ d["kind"] = kind
+ s += bpki_cert_fmt_5 % d
+ for kind in ("TA",) + ca:
+ d["kind"] = kind
+ s += bpki_cert_fmt_6 % d
+ subprocess.check_call(s, shell = True)
+
+def setup_rootd(rpkid, rootd_yaml, db):
"""
- if self.is_hosted:
- assert self.hosted_by.rpki_port is not None
- return self.hosted_by.rpki_port
- else:
- assert self.rpki_port is not None
- return self.rpki_port
-
- def setup_bpki_certs(self):
+ Write the config files for rootd.
"""
- Create BPKI certificates for this entity.
- """
- logger.info("Constructing BPKI keys and certs for %s", self.name)
- setup_bpki_cert_chain(name = self.name,
- ee = ("RPKI", "IRDB", "IRBE"),
- ca = ("SELF",))
- self.rpkid_ta = rpki.x509.X509(PEM_file = self.name + "-TA.cer")
- self.irbe_key = rpki.x509.RSA( PEM_file = self.name + "-IRBE.key")
- self.irbe_cert = rpki.x509.X509(PEM_file = self.name + "-IRBE.cer")
- self.rpkid_cert = rpki.x509.X509(PEM_file = self.name + "-RPKI.cer")
-
- def setup_conf_file(self):
+
+ rpkid.cross_certify(rootd_name + "-TA", reverse = True)
+ cross_certify(pubd_name + "-TA", rootd_name + "-TA")
+ logger.info("Writing config files for %s", rootd_name)
+ d = dict(rootd_name = rootd_name,
+ rootd_port = rootd_port,
+ rpkid_name = rpkid.name,
+ pubd_name = pubd_name,
+ rootd_sia = rootd_sia,
+ rsyncd_dir = rsyncd_dir,
+ openssl = prog_openssl,
+ lifetime = rootd_yaml.get("lifetime", "30d"),
+ pubd_port = pubd_port,
+ rootd_handle = db.root.client_handle + "-" + rootd_name)
+ f = open(rootd_name + ".conf", "w")
+ f.write(rootd_fmt_1 % d)
+ f.close()
+ s = "exec >/dev/null 2>&1\n"
+ #s = "set -x\n"
+ if not os.path.exists("root.key"):
+ s += rootd_fmt_2 % d
+ s += rootd_fmt_3 % d
+ subprocess.check_call(s, shell = True)
+
+def setup_rcynic():
"""
- Write config files for this entity.
+ Write the config file for rcynic.
"""
- logger.info("Writing config files for %s", self.name)
- assert self.rpki_port is not None
- d = { "my_name" : self.name,
- "irdb_db_name" : self.irdb_db_name,
- "irdb_db_pass" : irdb_db_pass,
- "irdb_port" : self.irdb_port,
- "rpki_db_name" : self.rpki_db_name,
- "rpki_db_pass" : rpki_db_pass,
- "rpki_port" : self.rpki_port }
- f = open(self.name + ".conf", "w")
- f.write(conf_fmt_1 % d)
- for line in self.extra_conf:
- f.write(line + "\n")
+
+ logger.info("Config file for rcynic")
+ d = dict(rcynic_name = rcynic_name,
+ rootd_name = rootd_name,
+ rootd_sia = rootd_sia)
+ f = open(rcynic_name + ".conf", "w")
+ f.write(rcynic_fmt_1 % d)
f.close()
- def setup_sql(self, rpki_sql, irdb_sql):
+def setup_rsyncd():
"""
- Set up this entity's IRDB.
+ Write the config file for rsyncd.
"""
- logger.info("Setting up MySQL for %s", self.name)
- db = MySQLdb.connect(user = "rpki", db = self.rpki_db_name, passwd = rpki_db_pass,
- conv = sql_conversions)
- cur = db.cursor()
- db.autocommit(True)
- for sql in rpki_sql:
- try:
- cur.execute(sql)
- except Exception:
- if "DROP TABLE IF EXISTS" not in sql.upper():
- raise
- db.close()
- db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass,
- conv = sql_conversions)
- cur = db.cursor()
- db.autocommit(True)
- for sql in irdb_sql:
- try:
- cur.execute(sql)
- except Exception:
- if "DROP TABLE IF EXISTS" not in sql.upper():
- raise
- for s in [self] + self.hosts:
- for kid in s.kids:
- cur.execute("INSERT registrant (registrant_handle, registry_handle, valid_until) VALUES (%s, %s, %s)",
- (kid.name, s.name, kid.resources.valid_until))
- db.close()
- def sync_sql(self):
+ logger.info("Config file for rsyncd")
+ d = dict(rsyncd_name = rsyncd_name,
+ rsyncd_port = rsyncd_port,
+ rsyncd_module = rsyncd_module,
+ rsyncd_dir = rsyncd_dir)
+ f = open(rsyncd_name + ".conf", "w")
+ f.write(rsyncd_fmt_1 % d)
+ f.close()
+
+def setup_publication(pubd_sql, irdb_db_name):
"""
- Whack this entity's IRDB to match our master database. We do this
- once during setup, then do it again every time we apply a delta to
- this entity.
+ Set up publication daemon.
"""
- logger.info("Updating MySQL data for IRDB %s", self.name)
- db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass,
+
+ logger.info("Configure publication daemon")
+ publication_dir = os.getcwd() + "/publication"
+ assert rootd_sia.startswith("rsync://")
+ global rsyncd_dir
+ rsyncd_dir = publication_dir + "/".join(rootd_sia.split("/")[4:])
+ if not rsyncd_dir.endswith("/"):
+ rsyncd_dir += "/"
+ os.makedirs(rsyncd_dir + "root/trunk")
+ db = MySQLdb.connect(db = pubd_db_name, user = pubd_db_user, passwd = pubd_db_pass,
conv = sql_conversions)
cur = db.cursor()
db.autocommit(True)
- cur.execute("DELETE FROM registrant_asn")
- cur.execute("DELETE FROM registrant_net")
- cur.execute("DELETE FROM roa_request_prefix")
- cur.execute("DELETE FROM roa_request")
- cur.execute("DELETE FROM ee_certificate_asn")
- cur.execute("DELETE FROM ee_certificate_net")
- cur.execute("DELETE FROM ee_certificate")
-
- for s in [self] + self.hosts:
- for kid in s.kids:
- cur.execute("SELECT registrant_id FROM registrant WHERE registrant_handle = %s AND registry_handle = %s",
- (kid.name, s.name))
- registrant_id = cur.fetchone()[0]
- for as_range in kid.resources.asn:
- cur.execute("INSERT registrant_asn (start_as, end_as, registrant_id) VALUES (%s, %s, %s)",
- (as_range.min, as_range.max, registrant_id))
- for v4_range in kid.resources.v4:
- cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 4, %s)",
- (v4_range.min, v4_range.max, registrant_id))
- for v6_range in kid.resources.v6:
- cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 6, %s)",
- (v6_range.min, v6_range.max, registrant_id))
- cur.execute("UPDATE registrant SET valid_until = %s WHERE registrant_id = %s",
- (kid.resources.valid_until, registrant_id))
- for r in s.roa_requests:
- cur.execute("INSERT roa_request (self_handle, asn) VALUES (%s, %s)",
- (s.name, r.asn))
- roa_request_id = cur.lastrowid
- for version, prefix_set in ((4, r.v4), (6, r.v6)):
- if prefix_set:
- cur.executemany("INSERT roa_request_prefix "
- "(roa_request_id, prefix, prefixlen, max_prefixlen, version) "
- "VALUES (%s, %s, %s, %s, %s)",
- ((roa_request_id, x.prefix, x.prefixlen, x.max_prefixlen, version)
- for x in prefix_set))
- for r in s.router_certs:
- cur.execute("INSERT ee_certificate (self_handle, pkcs10, gski, cn, sn, eku, valid_until) "
- "VALUES (%s, %s, %s, %s, %s, %s, %s)",
- (s.name, r.pkcs10.get_DER(), r.gski, r.cn, r.sn, r.eku, s.resources.valid_until))
- ee_certificate_id = cur.lastrowid
- cur.executemany("INSERT ee_certificate_asn (ee_certificate_id, start_as, end_as) VALUES (%s, %s, %s)",
- ((ee_certificate_id, a.min, a.max) for a in r.asn))
+ for sql in pubd_sql:
+ try:
+ cur.execute(sql)
+ except Exception:
+ if "DROP TABLE IF EXISTS" not in sql.upper():
+ raise
db.close()
+ d = dict(pubd_name = pubd_name,
+ pubd_port = pubd_port,
+ pubd_db_name = pubd_db_name,
+ pubd_db_user = pubd_db_user,
+ pubd_db_pass = pubd_db_pass,
+ pubd_dir = rsyncd_dir,
+ irdb_db_name = irdb_db_name,
+ irdb_db_pass = irdb_db_pass)
+ f = open(pubd_name + ".conf", "w")
+ f.write(pubd_fmt_1 % d)
+ f.close()
+ global pubd_ta
+ global pubd_irbe_key
+ global pubd_irbe_cert
+ global pubd_pubd_cert
+ pubd_ta = rpki.x509.X509(Auto_file = pubd_name + "-TA.cer")
+ pubd_irbe_key = rpki.x509.RSA( Auto_file = pubd_name + "-IRBE.key")
+ pubd_irbe_cert = rpki.x509.X509(Auto_file = pubd_name + "-IRBE.cer")
+ pubd_pubd_cert = rpki.x509.X509(Auto_file = pubd_name + "-PUBD.cer")
- def run_daemons(self):
- """
- Run daemons for this entity.
- """
- logger.info("Running daemons for %s", self.name)
- self.rpkid_process = subprocess.Popen((prog_python, prog_rpkid, "--foreground", "--log-stdout", "--log-level", "debug", "--config", self.name + ".conf") +
- (("--profile", self.name + ".prof") if args.profile else ()))
- self.irdbd_process = subprocess.Popen((prog_python, prog_irdbd, "--foreground", "--log-stdout", "--log-level", "debug", "--config", self.name + ".conf"))
-
- def kill_daemons(self):
- """
- Kill daemons for this entity.
- """
- # pylint: disable=E1103
- for proc, name in ((self.rpkid_process, "rpkid"),
- (self.irdbd_process, "irdbd")):
- if proc is not None and proc.poll() is None:
- logger.info("Killing daemon %s pid %s for %s", name, proc.pid, self.name)
- try:
- proc.terminate()
- except OSError:
- pass
- if proc is not None:
- logger.info("Daemon %s pid %s for %s exited with code %s",
- name, proc.pid, self.name, proc.wait())
-
- def call_rpkid(self, pdus, cb):
+def call_pubd(pdus, cb):
"""
- Send a left-right message to this entity's RPKI daemon and return
+ Send a publication control message to the publication daemon and return
the response.
-
- If this entity is hosted (does not run its own RPKI daemon), all
- of this happens with the hosting RPKI daemon.
"""
- logger.info("Calling rpkid for %s", self.name)
-
- if self.is_hosted:
- logger.info("rpkid %s is hosted by rpkid %s, switching", self.name, self.hosted_by.name)
- self = self.hosted_by
- assert not self.is_hosted
-
- assert isinstance(pdus, (list, tuple))
- assert self.rpki_port is not None
-
- q_msg = rpki.left_right.msg.query(*pdus)
- q_cms = rpki.left_right.cms_msg()
- q_der = q_cms.wrap(q_msg, self.irbe_key, self.irbe_cert)
- q_url = "http://localhost:%d/left-right" % self.rpki_port
+ logger.info("Calling pubd")
+ q_msg = rpki.publication_control.msg.query(*pdus)
+ q_cms = rpki.publication_control.cms_msg_saxify()
+ q_der = q_cms.wrap(q_msg, pubd_irbe_key, pubd_irbe_cert)
+ q_url = "http://localhost:%d/control" % pubd_port
logger.debug(q_cms.pretty_print_content())
- def done(r_der):
- logger.info("Callback from rpkid %s", self.name)
- r_cms = rpki.left_right.cms_msg(DER = r_der)
- r_msg = r_cms.unwrap((self.rpkid_ta, self.rpkid_cert))
- self.last_cms_time = r_cms.check_replay(self.last_cms_time, q_url)
- logger.debug(r_cms.pretty_print_content())
- assert r_msg.is_reply
- for r_pdu in r_msg:
- assert not isinstance(r_pdu, rpki.left_right.report_error_elt)
- cb(r_msg)
+ def call_pubd_cb(r_der):
+ global pubd_last_cms_time
+ r_cms = rpki.publication_control.cms_msg_saxify(DER = r_der)
+ r_msg = r_cms.unwrap((pubd_ta, pubd_pubd_cert))
+ pubd_last_cms_time = r_cms.check_replay(pubd_last_cms_time, q_url)
+ logger.debug(r_cms.pretty_print_content())
+ assert r_msg.is_reply
+ for r_pdu in r_msg:
+ r_pdu.raise_if_error()
+ cb(r_msg)
- def lose(e):
- raise
+ def call_pubd_eb(e):
+ logger.exception("Problem calling pubd")
rpki.http.client(
- url = q_url,
- msg = q_der,
- callback = done,
- errback = lose)
+ url = q_url,
+ msg = q_der,
+ callback = call_pubd_cb,
+ errback = call_pubd_eb)
+
- def cross_certify(self, certificant, reverse = False):
+def cross_certify(certificant, certifier):
"""
Cross-certify and return the resulting certificate.
"""
- if reverse:
- certifier = certificant
- certificant = self.name + "-SELF"
- else:
- certifier = self.name + "-SELF"
certfile = certifier + "-" + certificant + ".cer"
logger.info("Cross certifying %s into %s's BPKI (%s)", certificant, certifier, certfile)
@@ -938,424 +1320,58 @@ class allocation(object):
notAfter = now + rpki.sundial.timedelta(days = 30)
try:
- f = open(serial_file, "r")
- serial = f.read()
- f.close()
- serial = int(serial.splitlines()[0], 16)
+ with open(serial_file, "r") as f:
+ serial = int(f.read().splitlines()[0], 16)
except IOError:
- serial = 1
+ serial = 1
x = parent.bpki_cross_certify(
- keypair = keypair,
- source_cert = child,
- serial = serial,
- notAfter = notAfter,
- now = now)
-
- f = open(serial_file, "w")
- f.write("%02x\n" % (serial + 1))
- f.close()
+ keypair = keypair,
+ source_cert = child,
+ serial = serial,
+ notAfter = notAfter,
+ now = now)
- f = open(certfile, "w")
- f.write(x.get_PEM())
- f.close()
+ with open(serial_file, "w") as f:
+ f.write("%02x\n" % (serial + 1))
+
+ with open(certfile, "w") as f:
+ f.write(x.get_PEM())
logger.debug("Cross certified %s:", certfile)
logger.debug(" Issuer %s [%s]", x.getIssuer(), x.hAKI())
logger.debug(" Subject %s [%s]", x.getSubject(), x.hSKI())
return x
- def create_rpki_objects(self, cb):
- """
- Create RPKI engine objects for this engine.
-
- Root node of the engine tree is special, it too has a parent but
- that one is the magic self-signed micro engine.
-
- The rest of this is straightforward. There are a lot of objects
- to create, but we can batch them all into one honking PDU, then
- issue one more PDU to set BSC EE certificates based on the PKCS
- #10 requests we get back when we tell rpkid to generate BSC keys.
- """
-
- assert not self.is_hosted
-
- selves = [self] + self.hosts
-
- for i, s in enumerate(selves):
- logger.info("Creating RPKI objects for [%d] %s", i, s.name)
-
- rpkid_pdus = []
- pubd_pdus = []
-
- for s in selves:
-
- rpkid_pdus.append(rpki.left_right.self_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- crl_interval = s.crl_interval,
- regen_margin = s.regen_margin,
- bpki_cert = (s.cross_certify(s.hosted_by.name + "-TA", reverse = True)
- if s.is_hosted else
- rpki.x509.X509(Auto_file = s.name + "-SELF.cer"))))
-
- rpkid_pdus.append(rpki.left_right.bsc_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- bsc_handle = "b",
- generate_keypair = True))
-
- pubd_pdus.append(rpki.publication.client_elt.make_pdu(
- action = "create",
- client_handle = s.client_handle,
- base_uri = s.sia_base,
- bpki_cert = s.cross_certify(pubd_name + "-TA", reverse = True)))
-
- rpkid_pdus.append(rpki.left_right.repository_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- bsc_handle = "b",
- repository_handle = "r",
- bpki_cert = s.cross_certify(pubd_name + "-TA"),
- peer_contact_uri = "http://localhost:%d/client/%s" % (pubd_port, s.client_handle)))
-
- for k in s.kids:
- rpkid_pdus.append(rpki.left_right.child_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- child_handle = k.name,
- bsc_handle = "b",
- bpki_cert = s.cross_certify(k.name + "-SELF")))
-
- if s.is_root:
- rootd_cert = s.cross_certify(rootd_name + "-TA")
- rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- parent_handle = "rootd",
- bsc_handle = "b",
- repository_handle = "r",
- sia_base = s.sia_base,
- bpki_cms_cert = rootd_cert,
- sender_name = s.name,
- recipient_name = "rootd",
- peer_contact_uri = "http://localhost:%s/" % rootd_port))
- else:
- rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu(
- action = "create",
- self_handle = s.name,
- parent_handle = s.parent.name,
- bsc_handle = "b",
- repository_handle = "r",
- sia_base = s.sia_base,
- bpki_cms_cert = s.cross_certify(s.parent.name + "-SELF"),
- sender_name = s.name,
- recipient_name = s.parent.name,
- peer_contact_uri = "http://localhost:%s/up-down/%s/%s" % (s.parent.get_rpki_port(),
- s.parent.name, s.name)))
-
- def one():
- call_pubd(pubd_pdus, cb = two)
-
- def two(vals):
- self.call_rpkid(rpkid_pdus, cb = three)
-
- def three(vals):
-
- bsc_dict = dict((b.self_handle, b) for b in vals if isinstance(b, rpki.left_right.bsc_elt))
-
- bsc_pdus = []
-
- for s in selves:
- b = bsc_dict[s.name]
-
- logger.info("Issuing BSC EE cert for %s", s.name)
- cmd = (prog_openssl, "x509", "-req", "-sha256", "-extfile", s.name + "-RPKI.conf",
- "-extensions", "req_x509_ext", "-days", "30",
- "-CA", s.name + "-SELF.cer", "-CAkey", s.name + "-SELF.key", "-CAcreateserial", "-text")
- signer = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
- signed = signer.communicate(input = b.pkcs10_request.get_PEM())
- if not signed[0]:
- logger.warning(signed[1])
- raise CouldntIssueBSCEECertificate("Couldn't issue BSC EE certificate")
- s.bsc_ee = rpki.x509.X509(PEM = signed[0])
- s.bsc_crl = rpki.x509.CRL(PEM_file = s.name + "-SELF.crl")
- logger.info("BSC EE cert for %s SKI %s", s.name, s.bsc_ee.hSKI())
-
- bsc_pdus.append(rpki.left_right.bsc_elt.make_pdu(
- action = "set",
- self_handle = s.name,
- bsc_handle = "b",
- signing_cert = s.bsc_ee,
- signing_cert_crl = s.bsc_crl))
-
- self.call_rpkid(bsc_pdus, cb = four)
-
- def four(vals):
- cb()
-
- one()
-
- def setup_yaml_leaf(self):
- """
- Generate certificates and write YAML scripts for leaf nodes.
-
- We're cheating a bit here: properly speaking, we can't generate
- issue or revoke requests without knowing the class, which is
- generated on the fly, but at the moment the test case is
- simplistic enough that the class will always be "1", so we just
- wire in that value for now.
-
- Well, ok, we just broke that assumption. Now we do something even
- nastier, just to eke a bit more life out of this kludge. This
- really needs to be rewritten, but it may require a different tool
- than testpoke.
- """
-
- if not os.path.exists(self.name + ".key"):
- logger.info("Generating RPKI key for %s", self.name)
- subprocess.check_call((prog_openssl, "genrsa", "-out", self.name + ".key", "2048" ),
- stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
- ski = rpki.x509.RSA(PEM_file = self.name + ".key").gSKI()
-
- if self.parent.is_hosted:
- parent_host = self.parent.hosted_by.name
- else:
- parent_host = self.parent.name
-
- self.cross_certify(self.parent.name + "-SELF")
- self.cross_certify(parent_host + "-TA")
-
- logger.info("Writing leaf YAML for %s", self.name)
- f = open(self.name + ".yaml", "w")
- f.write(yaml_fmt_1 % {
- "parent_name" : self.parent.name,
- "parent_host" : parent_host,
- "my_name" : self.name,
- "http_port" : self.parent.get_rpki_port(),
- "class_name" : 2 if self.parent.is_hosted else 1,
- "sia" : self.sia_base,
- "ski" : ski })
- f.close()
+last_rcynic_run = None
- def run_cron(self, cb):
+def run_rcynic():
"""
- Trigger cron run for this engine.
+ Run rcynic to see whether what was published makes sense.
"""
- logger.info("Running cron for %s", self.name)
-
- assert self.rpki_port is not None
+ logger.info("Running rcynic")
+ env = os.environ.copy()
+ env["TZ"] = ""
+ global last_rcynic_run
+ if int(time.time()) == last_rcynic_run:
+ time.sleep(1)
+ subprocess.check_call((prog_rcynic, "-c", rcynic_name + ".conf"), env = env)
+ subprocess.call(rcynic_stats, shell = True, env = env)
+ last_rcynic_run = int(time.time())
+ os.link("%s.xml" % rcynic_name, "%s.%s.xml" % (rcynic_name, last_rcynic_run))
- def done(result):
- assert result == "OK", 'Expected "OK" result from cronjob, got %r' % result
- cb()
-
- rpki.http.client(
- url = "http://localhost:%d/cronjob" % self.rpki_port,
- msg = "Run cron now, please",
- callback = done,
- errback = done)
-
- def run_yaml(self):
+def mangle_sql(filename):
"""
- Run YAML scripts for this leaf entity. Since we're not bothering
- to check the class list returned by the list command, the issue
- command may fail, so we treat failure of the list command as an
- error, but only issue a warning when issue fails.
+ Mangle an SQL file into a sequence of SQL statements.
"""
- logger.info("Running YAML for %s", self.name)
- subprocess.check_call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "list"))
- if subprocess.call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "issue")) != 0:
- logger.warning("YAML issue command failed for %s, continuing", self.name)
-
-def setup_bpki_cert_chain(name, ee = (), ca = ()):
- """
- Build a set of BPKI certificates.
- """
- s = "exec >/dev/null 2>&1\n"
- #s = "set -x\n"
- for kind in ("TA",) + ee + ca:
- d = { "name" : name,
- "kind" : kind,
- "ca" : "false" if kind in ee else "true",
- "openssl" : prog_openssl }
- f = open("%(name)s-%(kind)s.conf" % d, "w")
- f.write(bpki_cert_fmt_1 % d)
+ words = []
+ f = open(filename)
+ for line in f:
+ words.extend(line.partition("--")[0].split())
f.close()
- if not os.path.exists("%(name)s-%(kind)s.key" % d):
- s += bpki_cert_fmt_2 % d
- s += bpki_cert_fmt_3 % d
- d = { "name" : name, "openssl" : prog_openssl }
- s += bpki_cert_fmt_4 % d
- for kind in ee + ca:
- d["kind"] = kind
- s += bpki_cert_fmt_5 % d
- for kind in ("TA",) + ca:
- d["kind"] = kind
- s += bpki_cert_fmt_6 % d
- subprocess.check_call(s, shell = True)
-
-def setup_rootd(rpkid, rootd_yaml):
- """
- Write the config files for rootd.
- """
- rpkid.cross_certify(rootd_name + "-TA", reverse = True)
- logger.info("Writing config files for %s", rootd_name)
- d = { "rootd_name" : rootd_name,
- "rootd_port" : rootd_port,
- "rpkid_name" : rpkid.name,
- "rootd_sia" : rootd_sia,
- "rsyncd_dir" : rsyncd_dir,
- "openssl" : prog_openssl,
- "lifetime" : rootd_yaml.get("lifetime", "30d") }
- f = open(rootd_name + ".conf", "w")
- f.write(rootd_fmt_1 % d)
- f.close()
- s = "exec >/dev/null 2>&1\n"
- #s = "set -x\n"
- if not os.path.exists("root.key"):
- s += rootd_fmt_2 % d
- s += rootd_fmt_3 % d
- subprocess.check_call(s, shell = True)
-
-def setup_rcynic():
- """
- Write the config file for rcynic.
- """
- logger.info("Config file for rcynic")
- d = { "rcynic_name" : rcynic_name,
- "rootd_name" : rootd_name,
- "rootd_sia" : rootd_sia }
- f = open(rcynic_name + ".conf", "w")
- f.write(rcynic_fmt_1 % d)
- f.close()
-
-def setup_rsyncd():
- """
- Write the config file for rsyncd.
- """
- logger.info("Config file for rsyncd")
- d = { "rsyncd_name" : rsyncd_name,
- "rsyncd_port" : rsyncd_port,
- "rsyncd_module" : rsyncd_module,
- "rsyncd_dir" : rsyncd_dir }
- f = open(rsyncd_name + ".conf", "w")
- f.write(rsyncd_fmt_1 % d)
- f.close()
-
-def setup_publication(pubd_sql):
- """
- Set up publication daemon.
- """
- logger.info("Configure publication daemon")
- publication_dir = os.getcwd() + "/publication"
- assert rootd_sia.startswith("rsync://")
- global rsyncd_dir
- rsyncd_dir = publication_dir + "/".join(rootd_sia.split("/")[4:])
- if not rsyncd_dir.endswith("/"):
- rsyncd_dir += "/"
- os.makedirs(rsyncd_dir + "root/trunk")
- db = MySQLdb.connect(db = pubd_db_name, user = pubd_db_user, passwd = pubd_db_pass,
- conv = sql_conversions)
- cur = db.cursor()
- db.autocommit(True)
- for sql in pubd_sql:
- try:
- cur.execute(sql)
- except Exception:
- if "DROP TABLE IF EXISTS" not in sql.upper():
- raise
- db.close()
- d = { "pubd_name" : pubd_name,
- "pubd_port" : pubd_port,
- "pubd_db_name" : pubd_db_name,
- "pubd_db_user" : pubd_db_user,
- "pubd_db_pass" : pubd_db_pass,
- "pubd_dir" : rsyncd_dir }
- f = open(pubd_name + ".conf", "w")
- f.write(pubd_fmt_1 % d)
- f.close()
- global pubd_ta
- global pubd_irbe_key
- global pubd_irbe_cert
- global pubd_pubd_cert
- pubd_ta = rpki.x509.X509(Auto_file = pubd_name + "-TA.cer")
- pubd_irbe_key = rpki.x509.RSA( Auto_file = pubd_name + "-IRBE.key")
- pubd_irbe_cert = rpki.x509.X509(Auto_file = pubd_name + "-IRBE.cer")
- pubd_pubd_cert = rpki.x509.X509(Auto_file = pubd_name + "-PUBD.cer")
-
-def call_pubd(pdus, cb):
- """
- Send a publication message to publication daemon and return the
- response.
- """
- logger.info("Calling pubd")
- q_msg = rpki.publication.msg.query(*pdus)
- q_cms = rpki.publication.cms_msg()
- q_der = q_cms.wrap(q_msg, pubd_irbe_key, pubd_irbe_cert)
- q_url = "http://localhost:%d/control" % pubd_port
-
- logger.debug(q_cms.pretty_print_content())
-
- def call_pubd_cb(r_der):
- global pubd_last_cms_time
- r_cms = rpki.publication.cms_msg(DER = r_der)
- r_msg = r_cms.unwrap((pubd_ta, pubd_pubd_cert))
- pubd_last_cms_time = r_cms.check_replay(pubd_last_cms_time, q_url)
- logger.debug(r_cms.pretty_print_content())
- assert r_msg.is_reply
- for r_pdu in r_msg:
- assert not isinstance(r_pdu, rpki.publication.report_error_elt)
- cb(r_msg)
-
- def call_pubd_eb(e):
- logger.exception("Problem calling pubd")
-
- rpki.http.client(
- url = q_url,
- msg = q_der,
- callback = call_pubd_cb,
- errback = call_pubd_eb)
-
-def set_pubd_crl(cb):
- """
- Whack publication daemon's bpki_crl. This must be configured before
- publication daemon starts talking to its clients, and must be
- updated whenever we update the CRL.
- """
- logger.info("Setting pubd's BPKI CRL")
- crl = rpki.x509.CRL(Auto_file = pubd_name + "-TA.crl")
- call_pubd([rpki.publication.config_elt.make_pdu(action = "set", bpki_crl = crl)], cb = lambda ignored: cb())
-
-last_rcynic_run = None
-
-def run_rcynic():
- """
- Run rcynic to see whether what was published makes sense.
- """
- logger.info("Running rcynic")
- env = os.environ.copy()
- env["TZ"] = ""
- global last_rcynic_run
- if int(time.time()) == last_rcynic_run:
- time.sleep(1)
- subprocess.check_call((prog_rcynic, "-c", rcynic_name + ".conf"), env = env)
- subprocess.call(rcynic_stats, shell = True, env = env)
- last_rcynic_run = int(time.time())
- os.link("%s.xml" % rcynic_name, "%s.%s.xml" % (rcynic_name, last_rcynic_run))
-
-def mangle_sql(filename):
- """
- Mangle an SQL file into a sequence of SQL statements.
- """
- words = []
- f = open(filename)
- for line in f:
- words.extend(line.partition("--")[0].split())
- f.close()
- return " ".join(words).strip(";").split(";")
+ return " ".join(words).strip(";").split(";")
bpki_cert_fmt_1 = '''\
[req]
@@ -1432,88 +1448,57 @@ bpki_cert_fmt_6 = ''' && \
-config %(name)s-%(kind)s.conf \
'''
-yaml_fmt_1 = '''---
-version: 1
-posturl: http://localhost:%(http_port)s/up-down/%(parent_name)s/%(my_name)s
-recipient-id: "%(parent_name)s"
-sender-id: "%(my_name)s"
-
-cms-cert-file: %(my_name)s-RPKI.cer
-cms-key-file: %(my_name)s-RPKI.key
-cms-ca-cert-file: %(my_name)s-TA.cer
-cms-crl-file: %(my_name)s-TA.crl
-cms-ca-certs-file:
- - %(my_name)s-TA-%(parent_name)s-SELF.cer
-
-ssl-cert-file: %(my_name)s-RPKI.cer
-ssl-key-file: %(my_name)s-RPKI.key
-ssl-ca-cert-file: %(my_name)s-TA.cer
-ssl-ca-certs-file:
- - %(my_name)s-TA-%(parent_host)s-TA.cer
-
-# We're cheating here by hardwiring the class name
-
-requests:
- list:
- type: list
- issue:
- type: issue
- class: %(class_name)s
- sia:
- - %(sia)s
- cert-request-key-file: %(my_name)s.key
- revoke:
- type: revoke
- class: %(class_name)s
- ski: %(ski)s
-'''
-
conf_fmt_1 = '''\
[irdbd]
-startup-message = This is %(my_name)s irdbd
+startup-message = This is %(my_name)s irdbd
-sql-database = %(irdb_db_name)s
-sql-username = irdb
-sql-password = %(irdb_db_pass)s
-bpki-ta = %(my_name)s-TA.cer
-rpkid-cert = %(my_name)s-RPKI.cer
-irdbd-cert = %(my_name)s-IRDB.cer
-irdbd-key = %(my_name)s-IRDB.key
-http-url = http://localhost:%(irdb_port)d/
-enable_tracebacks = yes
+sql-database = %(irdb_db_name)s
+sql-username = irdb
+sql-password = %(irdb_db_pass)s
+bpki-ta = %(my_name)s-TA.cer
+rpkid-cert = %(my_name)s-RPKI.cer
+irdbd-cert = %(my_name)s-IRDB.cer
+irdbd-key = %(my_name)s-IRDB.key
+http-url = http://localhost:%(irdb_port)d/
+enable_tracebacks = yes
[irbe_cli]
-rpkid-bpki-ta = %(my_name)s-TA.cer
-rpkid-cert = %(my_name)s-RPKI.cer
-rpkid-irbe-cert = %(my_name)s-IRBE.cer
-rpkid-irbe-key = %(my_name)s-IRBE.key
-rpkid-url = http://localhost:%(rpki_port)d/left-right
-enable_tracebacks = yes
+rpkid-bpki-ta = %(my_name)s-TA.cer
+rpkid-cert = %(my_name)s-RPKI.cer
+rpkid-irbe-cert = %(my_name)s-IRBE.cer
+rpkid-irbe-key = %(my_name)s-IRBE.key
+rpkid-url = http://localhost:%(rpki_port)d/left-right
+enable_tracebacks = yes
[rpkid]
-startup-message = This is %(my_name)s rpkid
+startup-message = This is %(my_name)s rpkid
-sql-database = %(rpki_db_name)s
-sql-username = rpki
-sql-password = %(rpki_db_pass)s
+sql-database = %(rpki_db_name)s
+sql-username = rpki
+sql-password = %(rpki_db_pass)s
-bpki-ta = %(my_name)s-TA.cer
-rpkid-key = %(my_name)s-RPKI.key
-rpkid-cert = %(my_name)s-RPKI.cer
-irdb-cert = %(my_name)s-IRDB.cer
-irbe-cert = %(my_name)s-IRBE.cer
+bpki-ta = %(my_name)s-TA.cer
+rpkid-key = %(my_name)s-RPKI.key
+rpkid-cert = %(my_name)s-RPKI.cer
+irdb-cert = %(my_name)s-IRDB.cer
+irbe-cert = %(my_name)s-IRBE.cer
-irdb-url = http://localhost:%(irdb_port)d/
+irdb-url = http://localhost:%(irdb_port)d/
+
+server-host = localhost
+server-port = %(rpki_port)d
-server-host = localhost
-server-port = %(rpki_port)d
+use-internal-cron = false
+enable_tracebacks = yes
-use-internal-cron = false
-enable_tracebacks = yes
+[myrpki]
+start_rpkid = yes
+start_irdbd = yes
+start_pubd = no
'''
rootd_fmt_1 = '''\
@@ -1525,24 +1510,28 @@ rootd-bpki-cert = %(rootd_name)s-RPKI.cer
rootd-bpki-key = %(rootd_name)s-RPKI.key
rootd-bpki-crl = %(rootd_name)s-TA.crl
child-bpki-cert = %(rootd_name)s-TA-%(rpkid_name)s-SELF.cer
+pubd-bpki-cert = %(rootd_name)s-TA-%(pubd_name)s-TA.cer
server-port = %(rootd_port)s
-rpki-root-dir = %(rsyncd_dir)sroot
-rpki-base-uri = %(rootd_sia)sroot/
-rpki-root-cert-uri = %(rootd_sia)sroot.cer
+rpki-class-name = trunk
-rpki-root-key = root.key
-rpki-root-cert = root.cer
+pubd-contact-uri = http://localhost:%(pubd_port)d/client/%(rootd_handle)s
+
+rpki-root-cert-file = root.cer
+rpki-root-cert-uri = %(rootd_sia)sroot.cer
+rpki-root-key-file = root.key
-rpki-subject-pkcs10 = %(rootd_name)s.subject.pkcs10
+rpki-subject-cert-file = trunk.cer
+rpki-subject-cert-uri = %(rootd_sia)sroot/trunk.cer
+rpki-subject-pkcs10-file= trunk.p10
rpki-subject-lifetime = %(lifetime)s
-rpki-root-crl = root.crl
-rpki-root-manifest = root.mft
+rpki-root-crl-file = root.crl
+rpki-root-crl-uri = %(rootd_sia)sroot/root.crl
-rpki-class-name = trunk
-rpki-subject-cert = trunk.cer
+rpki-root-manifest-file = root.mft
+rpki-root-manifest-uri = %(rootd_sia)sroot/root.mft
include-bpki-crl = yes
enable_tracebacks = yes
@@ -1579,7 +1568,7 @@ certificatePolicies = critical, @rpki_certificate_policy
[rpki_certificate_policy]
-policyIdentifier = 1.3.6.1.5.5.7.14.2
+policyIdentifier = 1.3.6.1.5.5.7.14.2
'''
rootd_fmt_2 = '''\
@@ -1602,8 +1591,7 @@ awk '!/-----(BEGIN|END)/' >>%(rootd_name)s.tal &&
-outform DER \
-extfile %(rootd_name)s.conf \
-extensions req_x509_rpki_ext \
- -signkey root.key &&
-ln -f root.cer %(rsyncd_dir)s
+ -signkey root.key
'''
rcynic_fmt_1 = '''\
@@ -1636,6 +1624,7 @@ sql-database = %(pubd_db_name)s
sql-username = %(pubd_db_user)s
sql-password = %(pubd_db_pass)s
bpki-ta = %(pubd_name)s-TA.cer
+pubd-crl = %(pubd_name)s-TA.crl
pubd-cert = %(pubd_name)s-PUBD.cer
pubd-key = %(pubd_name)s-PUBD.key
irbe-cert = %(pubd_name)s-IRBE.cer
@@ -1643,6 +1632,17 @@ server-host = localhost
server-port = %(pubd_port)d
publication-base = %(pubd_dir)s
enable_tracebacks = yes
+
+[irdbd]
+
+sql-database = %(irdb_db_name)s
+sql-username = irdb
+sql-password = %(irdb_db_pass)s
+
+[myrpki]
+start_rpkid = no
+start_irdbd = no
+start_pubd = yes
'''
main()
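As a quick illustration of the mangle_sql() helper defined above (used to turn schema
dumps into statements that setup_publication() and friends execute one at a time),
here is a minimal sketch; the filename and SQL content are made up, and it assumes
mangle_sql from smoketest.py is in scope:

    # Write a tiny schema file, then split it into executable statements.
    with open("example.sql", "w") as f:
        f.write("CREATE TABLE foo (id INT);  -- inline comment\n"
                "DROP TABLE IF EXISTS bar;\n")
    print mangle_sql("example.sql")
    # -> ['CREATE TABLE foo (id INT)', ' DROP TABLE IF EXISTS bar']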
diff --git a/ca/tests/sql-cleaner.py b/ca/tests/sql-cleaner.py
index ca88d456..c518b77b 100644
--- a/ca/tests/sql-cleaner.py
+++ b/ca/tests/sql-cleaner.py
@@ -19,43 +19,33 @@
"""
import rpki.config
-import rpki.sql_schemas
from rpki.mysql_import import MySQLdb
-cfg = rpki.config.parser(None, "yamltest", allow_missing = True)
+cfg = rpki.config.parser(section = "yamltest", allow_missing = True)
for name in ("rpkid", "irdbd", "pubd"):
- username = cfg.get("%s_sql_username" % name, name[:4])
- password = cfg.get("%s_sql_password" % name, "fnord")
+ username = cfg.get("%s_sql_username" % name, name[:4])
+ password = cfg.get("%s_sql_password" % name, "fnord")
- schema = []
- for line in getattr(rpki.sql_schemas, name, "").splitlines():
- schema.extend(line.partition("--")[0].split())
- schema = " ".join(schema).strip(";").split(";")
- schema = [statement.strip() for statement in schema if statement and "DROP TABLE" not in statement]
+ db = MySQLdb.connect(user = username, passwd = password)
+ cur = db.cursor()
- db = MySQLdb.connect(user = username, passwd = password)
- cur = db.cursor()
+ cur.execute("SHOW DATABASES")
- cur.execute("SHOW DATABASES")
+ databases = [r[0] for r in cur.fetchall() if r[0][:4] == name[:4] and r[0][4:].isdigit()]
- databases = [r[0] for r in cur.fetchall() if r[0][:4] == name[:4] and r[0][4:].isdigit()]
+ for database in databases:
- for database in databases:
+ cur.execute("USE " + database)
- cur.execute("USE " + database)
+ cur.execute("SHOW TABLES")
+ tables = [r[0] for r in cur.fetchall()]
- cur.execute("SHOW TABLES")
- tables = [r[0] for r in cur.fetchall()]
+ cur.execute("SET foreign_key_checks = 0")
+ for table in tables:
+ cur.execute("DROP TABLE %s" % table)
+ cur.execute("SET foreign_key_checks = 1")
- cur.execute("SET foreign_key_checks = 0")
- for table in tables:
- cur.execute("DROP TABLE %s" % table)
- cur.execute("SET foreign_key_checks = 1")
-
- for statement in schema:
- cur.execute(statement)
-
- cur.close()
- db.close()
+ cur.close()
+ db.close()
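The database-name filter above is terse; this sketch (with hypothetical names) shows
what the r[0][:4] == name[:4] and r[0][4:].isdigit() test selects when name is "rpkid":

    for candidate in ("rpki0", "rpki12", "rpkidb", "mysql"):
        print candidate, candidate[:4] == "rpkid"[:4] and candidate[4:].isdigit()
    # rpki0 True, rpki12 True, rpkidb False, mysql False

Only databases named like rpki0, irdb3, pubd12 (daemon prefix plus a numeric suffix)
have their tables dropped; everything else is left alone.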
diff --git a/ca/tests/sql-dumper.py b/ca/tests/sql-dumper.py
index 19cc1b34..af24f2d4 100644
--- a/ca/tests/sql-dumper.py
+++ b/ca/tests/sql-dumper.py
@@ -22,22 +22,22 @@ import subprocess
import rpki.config
from rpki.mysql_import import MySQLdb
-cfg = rpki.config.parser(None, "yamltest", allow_missing = True)
+cfg = rpki.config.parser(section = "yamltest", allow_missing = True)
for name in ("rpkid", "irdbd", "pubd"):
- username = cfg.get("%s_sql_username" % name, name[:4])
- password = cfg.get("%s_sql_password" % name, "fnord")
+ username = cfg.get("%s_sql_username" % name, name[:4])
+ password = cfg.get("%s_sql_password" % name, "fnord")
- cmd = ["mysqldump", "-u", username, "-p" + password, "--databases"]
+ cmd = ["mysqldump", "-u", username, "-p" + password, "--databases"]
- db = MySQLdb.connect(user = username, passwd = password)
- cur = db.cursor()
+ db = MySQLdb.connect(user = username, passwd = password)
+ cur = db.cursor()
- cur.execute("SHOW DATABASES")
- cmd.extend(r[0] for r in cur.fetchall() if r[0][:4] == name[:4] and r[0][4:].isdigit())
+ cur.execute("SHOW DATABASES")
+ cmd.extend(r[0] for r in cur.fetchall() if r[0][:4] == name[:4] and r[0][4:].isdigit())
- cur.close()
- db.close()
+ cur.close()
+ db.close()
- subprocess.check_call(cmd, stdout = open("backup.%s.sql" % name, "w"))
+ subprocess.check_call(cmd, stdout = open("backup.%s.sql" % name, "w"))
diff --git a/ca/tests/test-rrdp.py b/ca/tests/test-rrdp.py
new file mode 100755
index 00000000..97797444
--- /dev/null
+++ b/ca/tests/test-rrdp.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+# $Id$
+#
+# Copyright (C) 2013 Dragon Research Labs ("DRL")
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT,
+# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Proof-of-concept test driver for RRDP code. Still fairly kludgy in places.
+"""
+
+import os
+import sys
+import glob
+import time
+import signal
+import textwrap
+import argparse
+import subprocess
+
+parser = argparse.ArgumentParser(description = __doc__)
+parser.add_argument("--use-smoketest", action = "store_true")
+parser.add_argument("--yaml-file", default = "smoketest.2.yaml")
+parser.add_argument("--delay", type = int, default = 30)
+parser.add_argument("--exhaustive", action = "store_true")
+parser.add_argument("--skip-daemons", action = "store_true")
+parser.add_argument("--dry-run", action = "store_true")
+args = parser.parse_args()
+
+def log(msg):
+ sys.stdout.write(msg + "\n")
+ sys.stdout.flush()
+
+def run(*argv):
+ log("Running: " + " ".join(argv))
+ if not args.dry_run:
+ subprocess.check_call(argv)
+
+def dataglob(pattern):
+ return glob.iglob(os.path.join(("smoketest.dir" if args.use_smoketest else "yamltest.dir/RIR"), pattern))
+
+def snapshot_to_serial(fn):
+ return int(os.path.splitext(os.path.basename(fn))[0])
+
+def delta_to_serial(fn):
+ return int(os.path.splitext(os.path.basename(fn))[0])
+
+top = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..", ".."))
+
+rrdp_test_tool = os.path.join(top, "potpourri/rrdp-test-tool")
+rcynic = os.path.join(top, "rp/rcynic/rcynic")
+rcynic_text = os.path.join(top, "rp/rcynic/rcynic-text")
+
+with open("rcynic-rrdp.conf", "w") as f:
+ f.write(textwrap.dedent('''# Automatically generated for RRDP tests, do not edit.
+ [rcynic]
+ xml-summary = rcynic.xml
+ jitter = 0
+ use-links = yes
+ use-syslog = no
+ use-stderr = yes
+ log-level = log_debug
+ run-rsync = no
+ '''))
+ if args.use_smoketest:
+ f.write("trust-anchor = smoketest.dir/root.cer\n")
+ else:
+ f.write("trust-anchor = yamltest.dir/RIR/publication/RIR-root/root.cer\n")
+
+if args.skip_daemons:
+ log("--skip-daemons specified, so running neither smoketest nor yamltest")
+elif args.use_smoketest:
+ run("python", "smoketest.py", args.yaml_file)
+else:
+ run("python", "sql-cleaner.py")
+ class GotSIGUSR1(Exception):
+ pass
+ def handle_sigusr1(signum, frame):
+ raise GotSIGUSR1
+ old_sigusr1 = signal.signal(signal.SIGUSR1, handle_sigusr1)
+ cmd = ("python", "yamltest.py", args.yaml_file, "--notify-when-startup-complete", str(os.getpid()))
+ log("Running: " + " ".join(cmd))
+ yamltest = subprocess.Popen(cmd)
+ log("Waiting for SIGUSR1 from yamltest")
+ try:
+ while True:
+ signal.pause()
+ except GotSIGUSR1:
+ signal.signal(signal.SIGUSR1, old_sigusr1)
+ log("Sleeping %s" % args.delay)
+ time.sleep(args.delay)
+ yamltest.terminate()
+
+snapshots = dict((snapshot_to_serial(fn), fn) for fn in dataglob("rrdp-publication/*/snapshot/*.xml"))
+deltas = dict((delta_to_serial(fn), fn) for fn in dataglob("rrdp-publication/*/deltas/*.xml"))
+
+for snapshot in sorted(snapshots):
+
+ time.sleep(1)
+ run("rm", "-rf", "rcynic-data")
+ run(rrdp_test_tool, snapshots[snapshot])
+ run(rcynic, "-c", "rcynic-rrdp.conf")
+ run(rcynic_text, "rcynic.xml")
+
+ for delta in sorted(deltas):
+ if delta > snapshot:
+ time.sleep(1)
+ run(rrdp_test_tool, deltas[delta])
+ run(rcynic, "-c", "rcynic-rrdp.conf")
+ run(rcynic_text, "rcynic.xml")
+
+ if not args.exhaustive:
+ break
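The replay loop above leans on a simple file-naming convention; a sketch with
hypothetical paths, assuming the snapshot_to_serial()/delta_to_serial() helpers
from test-rrdp.py are in scope:

    print snapshot_to_serial("yamltest.dir/RIR/rrdp-publication/session-1/snapshot/7.xml")  # 7
    print delta_to_serial("yamltest.dir/RIR/rrdp-publication/session-1/deltas/8.xml")       # 8
    # For each snapshot serial N, the driver replays that snapshot and then every
    # delta with serial > N, re-running rcynic and rcynic-text after each step.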
diff --git a/ca/tests/testpoke.py b/ca/tests/testpoke.py
index c28ed397..60cc5690 100644
--- a/ca/tests/testpoke.py
+++ b/ca/tests/testpoke.py
@@ -58,80 +58,80 @@ yaml_data = yaml.load(args.yaml)
yaml_cmd = args.request
if yaml_cmd is None and len(yaml_data["requests"]) == 1:
- yaml_cmd = yaml_data["requests"].keys()[0]
+ yaml_cmd = yaml_data["requests"].keys()[0]
yaml_req = yaml_data["requests"][yaml_cmd]
def get_PEM(name, cls, y = yaml_data):
- if name in y:
- return cls(PEM = y[name])
- if name + "-file" in y:
- return cls(PEM_file = y[name + "-file"])
- return None
+ if name in y:
+ return cls(PEM = y[name])
+ if name + "-file" in y:
+ return cls(PEM_file = y[name + "-file"])
+ return None
def get_PEM_chain(name, cert = None):
- chain = []
- if cert is not None:
- chain.append(cert)
- if name in yaml_data:
- chain.extend([rpki.x509.X509(PEM = x) for x in yaml_data[name]])
- elif name + "-file" in yaml_data:
- chain.extend([rpki.x509.X509(PEM_file = x) for x in yaml_data[name + "-file"]])
- return chain
+ chain = []
+ if cert is not None:
+ chain.append(cert)
+ if name in yaml_data:
+ chain.extend(rpki.x509.X509(PEM = x) for x in yaml_data[name])
+ elif name + "-file" in yaml_data:
+ chain.extend(rpki.x509.X509(PEM_file = x) for x in yaml_data[name + "-file"])
+ return chain
def query_up_down(q_pdu):
- q_msg = rpki.up_down.message_pdu.make_query(
- payload = q_pdu,
- sender = yaml_data["sender-id"],
- recipient = yaml_data["recipient-id"])
- q_der = rpki.up_down.cms_msg().wrap(q_msg, cms_key, cms_certs, cms_crl)
-
- def done(r_der):
- global last_cms_timestamp
- r_cms = rpki.up_down.cms_msg(DER = r_der)
- r_msg = r_cms.unwrap([cms_ta] + cms_ca_certs)
- last_cms_timestamp = r_cms.check_replay(last_cms_timestamp)
- print r_cms.pretty_print_content()
- try:
- r_msg.payload.check_response()
- except (rpki.async.ExitNow, SystemExit):
- raise
- except Exception, e:
- fail(e)
-
- rpki.http.want_persistent_client = False
-
- rpki.http.client(
- msg = q_der,
- url = yaml_data["posturl"],
- callback = done,
- errback = fail,
- content_type = rpki.up_down.content_type)
+ q_msg = rpki.up_down.message_pdu.make_query(
+ payload = q_pdu,
+ sender = yaml_data["sender-id"],
+ recipient = yaml_data["recipient-id"])
+ q_der = rpki.up_down.cms_msg_saxify().wrap(q_msg, cms_key, cms_certs, cms_crl)
+
+ def done(r_der):
+ global last_cms_timestamp
+ r_cms = rpki.up_down.cms_msg_saxify(DER = r_der)
+ r_msg = r_cms.unwrap([cms_ta] + cms_ca_certs)
+ last_cms_timestamp = r_cms.check_replay(last_cms_timestamp)
+ print r_cms.pretty_print_content()
+ try:
+ r_msg.payload.check_response()
+ except (rpki.async.ExitNow, SystemExit):
+ raise
+ except Exception, e:
+ fail(e)
+
+ rpki.http.want_persistent_client = False
+
+ rpki.http.client(
+ msg = q_der,
+ url = yaml_data["posturl"],
+ callback = done,
+ errback = fail,
+ content_type = rpki.up_down.content_type)
def do_list():
- query_up_down(rpki.up_down.list_pdu())
+ query_up_down(rpki.up_down.list_pdu())
def do_issue():
- q_pdu = rpki.up_down.issue_pdu()
- req_key = get_PEM("cert-request-key", rpki.x509.RSA, yaml_req) or cms_key
- q_pdu.class_name = yaml_req["class"]
- q_pdu.pkcs10 = rpki.x509.PKCS10.create(
- keypair = req_key,
- is_ca = True,
- caRepository = yaml_req["sia"][0],
- rpkiManifest = yaml_req["sia"][0] + req_key.gSKI() + ".mft")
- query_up_down(q_pdu)
+ q_pdu = rpki.up_down.issue_pdu()
+ req_key = get_PEM("cert-request-key", rpki.x509.RSA, yaml_req) or cms_key
+ q_pdu.class_name = yaml_req["class"]
+ q_pdu.pkcs10 = rpki.x509.PKCS10.create(
+ keypair = req_key,
+ is_ca = True,
+ caRepository = yaml_req["sia"][0],
+ rpkiManifest = yaml_req["sia"][0] + req_key.gSKI() + ".mft")
+ query_up_down(q_pdu)
def do_revoke():
- q_pdu = rpki.up_down.revoke_pdu()
- q_pdu.class_name = yaml_req["class"]
- q_pdu.ski = yaml_req["ski"]
- query_up_down(q_pdu)
+ q_pdu = rpki.up_down.revoke_pdu()
+ q_pdu.class_name = yaml_req["class"]
+ q_pdu.ski = yaml_req["ski"]
+ query_up_down(q_pdu)
dispatch = { "list" : do_list, "issue" : do_issue, "revoke" : do_revoke }
def fail(e): # pylint: disable=W0621
- sys.exit("Testpoke failed: %s" % e)
+ sys.exit("Testpoke failed: %s" % e)
cms_ta = get_PEM("cms-ca-cert", rpki.x509.X509)
cms_cert = get_PEM("cms-cert", rpki.x509.X509)
@@ -143,7 +143,7 @@ cms_ca_certs = get_PEM_chain("cms-ca-certs")
last_cms_timestamp = None
try:
- dispatch[yaml_req["type"]]()
- rpki.async.event_loop()
+ dispatch[yaml_req["type"]]()
+ rpki.async.event_loop()
except Exception, e:
- fail(e)
+ fail(e)
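For reference, a testpoke input file has roughly the shape below. This is a hypothetical
minimal "list" request; the field names come from the keys testpoke reads above and from
the yaml_fmt_1 template that smoketest.py used to generate, and the certificate file
names and port are placeholders:

    ---
    version: 1
    posturl: http://localhost:4404/up-down/Parent/Child
    recipient-id: "Parent"
    sender-id: "Child"
    cms-cert-file: Child-RPKI.cer
    cms-key-file: Child-RPKI.key
    cms-ca-cert-file: Child-TA.cer
    cms-crl-file: Child-TA.crl
    cms-ca-certs-file:
      - Child-TA-Parent-SELF.cer
    requests:
      list:
        type: list

It would then be run with something like "python testpoke.py -y Child.yaml -r list";
when the file defines only one request, the -r option can be omitted.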
diff --git a/ca/tests/xml-parse-test.py b/ca/tests/xml-parse-test.py
index 5ea25492..f24d5683 100644
--- a/ca/tests/xml-parse-test.py
+++ b/ca/tests/xml-parse-test.py
@@ -28,92 +28,102 @@
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
-import glob, lxml.etree, lxml.sax
-import rpki.up_down, rpki.left_right, rpki.publication, rpki.relaxng
+import glob
+import lxml.etree
+import rpki.up_down
+import rpki.left_right
+import rpki.publication
+import rpki.publication_control
+import rpki.relaxng
verbose = False
-def test(fileglob, rng, sax_handler, encoding, tester = None):
- files = glob.glob(fileglob)
- files.sort()
- for f in files:
- print "<!--", f, "-->"
- handler = sax_handler()
- elt_in = lxml.etree.parse(f).getroot()
- if verbose:
- print "<!-- Input -->"
- print lxml.etree.tostring(elt_in, pretty_print = True, encoding = encoding, xml_declaration = True)
- rng.assertValid(elt_in)
- lxml.sax.saxify(elt_in, handler)
- elt_out = handler.result.toXML()
- if verbose:
- print "<!-- Output -->"
- print lxml.etree.tostring(elt_out, pretty_print = True, encoding = encoding, xml_declaration = True)
- rng.assertValid(elt_out)
- if tester:
- tester(elt_in, elt_out, handler.result)
- if verbose:
- print
+def test(fileglob, rng, parser, encoding, tester = None):
+ files = glob.glob(fileglob)
+ files.sort()
+ for f in files:
+ print "<!--", f, "-->"
+ elt_in = lxml.etree.parse(f).getroot()
+ if verbose:
+ print "<!-- Input -->"
+ print lxml.etree.tostring(elt_in, pretty_print = True, encoding = encoding, xml_declaration = True)
+ rng.assertValid(elt_in)
+ parsed = parser.fromXML(elt_in)
+ elt_out = parsed.toXML()
+ if verbose:
+ print "<!-- Output -->"
+ print lxml.etree.tostring(elt_out, pretty_print = True, encoding = encoding, xml_declaration = True)
+ rng.assertValid(elt_out)
+ if tester:
+ tester(elt_in, elt_out, parsed)
+ if verbose:
+ print
def pprint(pairs):
- if verbose:
- for thing, name in pairs:
- if thing is not None:
- print "[%s]" % name
- print thing.get_POW().pprint()
+ if verbose:
+ for thing, name in pairs:
+ if thing is not None:
+ print "[%s]" % name
+ print thing.get_POW().pprint()
def ud_tester(elt_in, elt_out, msg):
- assert isinstance(msg, rpki.up_down.message_pdu)
- if isinstance(msg.payload, rpki.up_down.list_response_pdu):
- for c in msg.payload.classes:
- pprint([(c.certs[i].cert, ("%s certificate #%d" % (c.class_name, i))) for i in xrange(len(c.certs))] + [(c.issuer, ("%s issuer" % c.class_name))])
+ assert isinstance(msg, rpki.up_down.message_pdu)
+ if isinstance(msg.payload, rpki.up_down.list_response_pdu):
+ for c in msg.payload.classes:
+ pprint([(c.certs[i].cert, ("%s certificate #%d" % (c.class_name, i))) for i in xrange(len(c.certs))] + [(c.issuer, ("%s issuer" % c.class_name))])
def lr_tester(elt_in, elt_out, msg):
- assert isinstance(msg, rpki.left_right.msg)
- for obj in msg:
- if isinstance(obj, rpki.left_right.self_elt):
- pprint(((obj.bpki_cert, "BPKI cert"),
- (obj.bpki_glue, "BPKI glue")))
- if isinstance(obj, rpki.left_right.bsc_elt):
- pprint(((obj.signing_cert, "Signing certificate"),
- (obj.signing_cert_crl, "Signing certificate CRL")))
- # (obj.pkcs10_request, "PKCS #10 request")
- if isinstance(obj, rpki.left_right.parent_elt):
- pprint(((obj.bpki_cms_cert, "CMS certificate"),
- (obj.bpki_cms_glue, "CMS glue")))
- if isinstance(obj, (rpki.left_right.child_elt, rpki.left_right.repository_elt)):
- pprint(((obj.bpki_cert, "Certificate"),
- (obj.bpki_glue, "Glue")))
+ assert isinstance(msg, rpki.left_right.msg)
+ for obj in msg:
+ if isinstance(obj, rpki.left_right.self_elt):
+ pprint(((obj.bpki_cert, "BPKI cert"),
+ (obj.bpki_glue, "BPKI glue")))
+ if isinstance(obj, rpki.left_right.bsc_elt):
+ pprint(((obj.signing_cert, "Signing certificate"),
+ (obj.signing_cert_crl, "Signing certificate CRL")))
+ # (obj.pkcs10_request, "PKCS #10 request")
+ if isinstance(obj, rpki.left_right.parent_elt):
+ pprint(((obj.bpki_cert, "BPKI certificate"),
+ (obj.bpki_glue, "BPKI glue")))
+ if isinstance(obj, (rpki.left_right.child_elt, rpki.left_right.repository_elt)):
+ pprint(((obj.bpki_cert, "BPKI certificate"),
+ (obj.bpki_glue, "BPKI glue")))
def pp_tester(elt_in, elt_out, msg):
- assert isinstance(msg, rpki.publication.msg)
- for obj in msg:
- if isinstance(obj, rpki.publication.client_elt):
- pprint(((obj.bpki_cert, "BPKI cert"),
- (obj.bpki_glue, "BPKI glue")))
- if isinstance(obj, rpki.publication.certificate_elt):
- pprint(((obj.payload, "RPKI cert"),))
- if isinstance(obj, rpki.publication.crl_elt):
- pprint(((obj.payload, "RPKI CRL"),))
- if isinstance(obj, rpki.publication.manifest_elt):
- pprint(((obj.payload, "RPKI manifest"),))
- if isinstance(obj, rpki.publication.roa_elt):
- pprint(((obj.payload, "ROA"),))
+ assert isinstance(msg, rpki.publication.msg)
+ for obj in msg:
+ if isinstance(obj, rpki.publication.publish_elt):
+ pprint(((obj.payload, "Publish object"),))
+ if isinstance(obj, rpki.publication.withdraw_elt):
+ pprint(((None, "Withdraw object"),))
+
+def pc_tester(elt_in, elt_out, msg):
+ assert isinstance(msg, rpki.publication_control.msg)
+ for obj in msg:
+ if isinstance(obj, rpki.publication_control.client_elt):
+ pprint(((obj.bpki_cert, "BPKI cert"),
+ (obj.bpki_glue, "BPKI glue")))
test(fileglob = "up-down-protocol-samples/*.xml",
rng = rpki.relaxng.up_down,
- sax_handler = rpki.up_down.sax_handler,
+ parser = rpki.up_down.msg,
encoding = "utf-8",
tester = ud_tester)
test(fileglob = "left-right-protocol-samples/*.xml",
rng = rpki.relaxng.left_right,
- sax_handler = rpki.left_right.sax_handler,
+ parser = rpki.left_right.msg,
encoding = "us-ascii",
tester = lr_tester)
test(fileglob = "publication-protocol-samples/*.xml",
rng = rpki.relaxng.publication,
- sax_handler = rpki.publication.sax_handler,
+ parser = rpki.publication.msg,
encoding = "us-ascii",
tester = pp_tester)
+
+test(fileglob = "publication-control-protocol-samples/*.xml",
+ rng = rpki.relaxng.publication_control,
+ parser = rpki.publication_control.msg,
+ encoding = "us-ascii",
+ tester = pc_tester)
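The new test() above is just a validate/parse/re-serialize round trip; a minimal sketch
for a single left-right sample, using the imports at the top of the script (the sample
filename is hypothetical):

    elt_in = lxml.etree.parse("left-right-protocol-samples/sample.xml").getroot()
    rpki.relaxng.left_right.assertValid(elt_in)       # schema-check the input
    msg = rpki.left_right.msg.fromXML(elt_in)         # parse into internal objects
    elt_out = msg.toXML()                             # re-serialize
    rpki.relaxng.left_right.assertValid(elt_out)      # the output must validate too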
diff --git a/ca/tests/yamlconf.py b/ca/tests/yamlconf.py
index f1073c92..52c4da26 100644
--- a/ca/tests/yamlconf.py
+++ b/ca/tests/yamlconf.py
@@ -75,786 +75,787 @@ config_overrides = {
"pubd_sql_username" : "pubd", "pubd_sql_password" : "fnord" }
def cleanpath(*names):
- return os.path.normpath(os.path.join(*names))
+ return os.path.normpath(os.path.join(*names))
this_dir = os.getcwd()
test_dir = None
rpki_conf = None
class roa_request(object):
- """
- Representation of a ROA request.
- """
-
- def __init__(self, asn, ipv4, ipv6):
- self.asn = asn
- self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None
- self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None
-
- def __eq__(self, other):
- return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6
-
- def __hash__(self):
- v4 = tuple(self.v4) if self.v4 is not None else None
- v6 = tuple(self.v6) if self.v6 is not None else None
- return self.asn.__hash__() + v4.__hash__() + v6.__hash__()
-
- def __str__(self):
- if self.v4 and self.v6:
- return "%s: %s,%s" % (self.asn, self.v4, self.v6)
- else:
- return "%s: %s" % (self.asn, self.v4 or self.v6)
+ """
+ Representation of a ROA request.
+ """
- @classmethod
- def parse(cls, y):
- return cls(y.get("asn"), y.get("ipv4"), y.get("ipv6"))
+ def __init__(self, asn, ipv4, ipv6):
+ self.asn = asn
+ self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None
+ self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None
-class router_cert(object):
- """
- Representation for a router_cert object.
- """
+ def __eq__(self, other):
+ return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6
- _ecparams = None
+ def __hash__(self):
+ v4 = tuple(self.v4) if self.v4 is not None else None
+ v6 = tuple(self.v6) if self.v6 is not None else None
+ return self.asn.__hash__() + v4.__hash__() + v6.__hash__()
- @classmethod
- def ecparams(cls):
- if cls._ecparams is None:
- cls._ecparams = rpki.x509.KeyParams.generateEC()
- return cls._ecparams
+ def __str__(self):
+ if self.v4 and self.v6:
+ return "%s: %s,%s" % (self.asn, self.v4, self.v6)
+ else:
+ return "%s: %s" % (self.asn, self.v4 or self.v6)
- def __init__(self, asn, router_id):
- self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split()))
- self.router_id = router_id
- self.keypair = rpki.x509.ECDSA.generate(self.ecparams())
- self.pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair)
- self.gski = self.pkcs10.gSKI()
+ @classmethod
+ def parse(cls, y):
+ return cls(y.get("asn"), y.get("ipv4"), y.get("ipv6"))
- def __eq__(self, other):
- return self.asn == other.asn and self.router_id == other.router_id and self.gski == other.gski
+class router_cert(object):
+ """
+ Representation for a router_cert object.
+ """
- def __hash__(self):
- return tuple(self.asn).__hash__() + self.router_id.__hash__() + self.gski.__hash__()
+ _ecparams = None
- def __str__(self):
- return "%s: %s: %s" % (self.asn, self.router_id, self.gski)
+ @classmethod
+ def ecparams(cls):
+ if cls._ecparams is None:
+ cls._ecparams = rpki.x509.KeyParams.generateEC()
+ return cls._ecparams
- @classmethod
- def parse(cls, yaml):
- return cls(yaml.get("asn"), yaml.get("router_id"))
+ def __init__(self, asn, router_id):
+ self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split()))
+ self.router_id = router_id
+ self.keypair = rpki.x509.ECDSA.generate(params = self.ecparams(), quiet = True)
+ self.pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair)
+ self.gski = self.pkcs10.gSKI()
+ def __eq__(self, other):
+ return self.asn == other.asn and self.router_id == other.router_id and self.gski == other.gski
-class allocation_db(list):
- """
- Allocation database.
- """
-
- def __init__(self, y):
- list.__init__(self)
- self.root = allocation(y, self)
- assert self.root.is_root
- if self.root.crl_interval is None:
- self.root.crl_interval = 60 * 60
- if self.root.regen_margin is None:
- self.root.regen_margin = 24 * 60 * 60
- if self.root.base.valid_until is None:
- self.root.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 2)
- for a in self:
- if a.base.valid_until is None:
- a.base.valid_until = a.parent.base.valid_until
- if a.crl_interval is None:
- a.crl_interval = a.parent.crl_interval
- if a.regen_margin is None:
- a.regen_margin = a.parent.regen_margin
- self.root.closure()
- self.map = dict((a.name, a) for a in self)
- for a in self:
- if a.is_hosted:
- a.hosted_by = self.map[a.hosted_by]
- a.hosted_by.hosts.append(a)
- assert not a.is_root and not a.hosted_by.is_hosted
-
- def dump(self):
- for a in self:
- a.dump()
+ def __hash__(self):
+ return tuple(self.asn).__hash__() + self.router_id.__hash__() + self.gski.__hash__()
+ def __str__(self):
+ return "%s: %s: %s" % (self.asn, self.router_id, self.gski)
-class allocation(object):
- """
- One entity in our allocation database. Every entity in the database
- is assumed to hold resources. Entities that don't have the
- hosted_by property run their own copies of rpkid, irdbd, and pubd.
- """
-
- base_port = 4400
- base_engine = -1
- parent = None
- crl_interval = None
- regen_margin = None
- engine = -1
- rpkid_port = 4404
- irdbd_port = 4403
- pubd_port = 4402
- rootd_port = 4401
- rsync_port = 873
-
- @classmethod
- def allocate_port(cls):
- cls.base_port += 1
- return cls.base_port
-
- @classmethod
- def allocate_engine(cls):
- cls.base_engine += 1
- return cls.base_engine
-
- def __init__(self, y, db, parent = None):
- db.append(self)
- self.name = y["name"]
- self.parent = parent
- self.kids = [allocation(k, db, self) for k in y.get("kids", ())]
- valid_until = None
- if "valid_until" in y:
- valid_until = rpki.sundial.datetime.from_datetime(y.get("valid_until"))
- if valid_until is None and "valid_for" in y:
- valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(y["valid_for"])
- self.base = rpki.resource_set.resource_bag(
- asn = rpki.resource_set.resource_set_as(y.get("asn")),
- v4 = rpki.resource_set.resource_set_ipv4(y.get("ipv4")),
- v6 = rpki.resource_set.resource_set_ipv6(y.get("ipv6")),
- valid_until = valid_until)
- if "crl_interval" in y:
- self.crl_interval = rpki.sundial.timedelta.parse(y["crl_interval"]).convert_to_seconds()
- if "regen_margin" in y:
- self.regen_margin = rpki.sundial.timedelta.parse(y["regen_margin"]).convert_to_seconds()
- if "ghostbusters" in y:
- self.ghostbusters = y.get("ghostbusters")
- elif "ghostbuster" in y:
- self.ghostbusters = [y.get("ghostbuster")]
- else:
- self.ghostbusters = []
- self.roa_requests = [roa_request.parse(r) for r in y.get("roa_request", ())]
- self.router_certs = [router_cert.parse(r) for r in y.get("router_cert", ())]
- for r in self.roa_requests:
- if r.v4:
- self.base.v4 |= r.v4.to_resource_set()
- if r.v6:
- self.base.v6 |= r.v6.to_resource_set()
- for r in self.router_certs:
- self.base.asn |= r.asn
- self.hosted_by = y.get("hosted_by")
- self.hosts = []
- if not self.is_hosted:
- self.engine = self.allocate_engine()
- if loopback and not self.is_hosted:
- self.rpkid_port = self.allocate_port()
- self.irdbd_port = self.allocate_port()
- if loopback and self.runs_pubd:
- self.pubd_port = self.allocate_port()
- self.rsync_port = self.allocate_port()
- if loopback and self.is_root:
- self.rootd_port = self.allocate_port()
-
- def closure(self):
- resources = self.base
- for kid in self.kids:
- resources |= kid.closure()
- self.resources = resources
- return resources
-
- @property
- def hostname(self):
- if loopback:
- return "localhost"
- elif dns_suffix:
- return self.name + "." + dns_suffix.lstrip(".")
- else:
- return self.name
+ @classmethod
+ def parse(cls, yaml):
+ return cls(yaml.get("asn"), yaml.get("router_id"))
- @property
- def rsync_server(self):
- if loopback:
- return "%s:%s" % (self.pubd.hostname, self.pubd.rsync_port)
- else:
- return self.pubd.hostname
- def dump(self):
- if not quiet:
- print str(self)
-
- def __str__(self):
- s = self.name + ":\n"
- if self.resources.asn: s += " ASNs: %s\n" % self.resources.asn
- if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4
- if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6
- if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids)
- if self.parent: s += " Up: %s\n" % self.parent.name
- if self.is_hosted: s += " Host: %s\n" % self.hosted_by.name
- if self.hosts: s += " Hosts: %s\n" % ", ".join(h.name for h in self.hosts)
- for r in self.roa_requests: s += " ROA: %s\n" % r
- if not self.is_hosted: s += " IPort: %s\n" % self.irdbd_port
- if self.runs_pubd: s += " PPort: %s\n" % self.pubd_port
- if not self.is_hosted: s += " RPort: %s\n" % self.rpkid_port
- if self.runs_pubd: s += " SPort: %s\n" % self.rsync_port
- if self.is_root: s += " TPort: %s\n" % self.rootd_port
- return s + " Until: %s\n" % self.resources.valid_until
-
- @property
- def is_root(self):
- return self.parent is None
-
- @property
- def is_hosted(self):
- return self.hosted_by is not None
-
- @property
- def runs_pubd(self):
- return self.is_root or not (self.is_hosted or only_one_pubd)
-
- def path(self, *names):
- return cleanpath(test_dir, self.host.name, *names)
-
- def csvout(self, fn):
- path = self.path(fn)
- if not quiet:
- print "Writing", path
- return rpki.csv_utils.csv_writer(path)
-
- def up_down_url(self):
- return "http://%s:%d/up-down/%s/%s" % (self.parent.host.hostname,
- self.parent.host.rpkid_port,
- self.parent.name,
- self.name)
-
- def dump_asns(self, fn):
- with self.csvout(fn) as f:
- for k in self.kids:
- f.writerows((k.name, a) for a in k.resources.asn)
-
- def dump_prefixes(self, fn):
- with self.csvout(fn) as f:
- for k in self.kids:
- f.writerows((k.name, p) for p in (k.resources.v4 + k.resources.v6))
-
- def dump_roas(self, fn):
- with self.csvout(fn) as f:
- for g1, r in enumerate(self.roa_requests):
- f.writerows((p, r.asn, "G%08d%08d" % (g1, g2))
- for g2, p in enumerate((r.v4 + r.v6 if r.v4 and r.v6 else r.v4 or r.v6 or ())))
-
- def dump_ghostbusters(self, fn):
- if self.ghostbusters:
- path = self.path(fn)
- if not quiet:
- print "Writing", path
- with open(path, "w") as f:
- for i, g in enumerate(self.ghostbusters):
- if i > 0:
- f.write("\n")
- f.write(g)
-
- def dump_router_certificates(self, fn):
- if self.router_certs:
- path = self.path(fn)
- if not quiet:
- print "Writing", path
- xmlns = rpki.relaxng.router_certificate.xmlns
- xml = lxml.etree.Element(xmlns + "router_certificate_requests",
- version = rpki.relaxng.router_certificate.version,
- nsmap = rpki.relaxng.router_certificate.nsmap)
- for r in self.router_certs:
- x = lxml.etree.SubElement(xml, xmlns + "router_certificate_request",
- router_id = str(r.router_id),
- asn = str(r.asn),
- valid_until = str(self.resources.valid_until))
- x.text = r.pkcs10.get_Base64()
- rpki.relaxng.router_certificate.assertValid(xml)
- lxml.etree.ElementTree(xml).write(path, pretty_print = True)
-
- @property
- def pubd(self):
- s = self
- while not s.runs_pubd:
- s = s.parent
- return s
-
- @property
- def client_handle(self):
- path = []
- s = self
- if not flat_publication:
- while not s.runs_pubd:
- path.append(s)
- s = s.parent
- path.append(s)
- return ".".join(i.name for i in reversed(path))
-
- @property
- def host(self):
- return self.hosted_by or self
-
- @property
- def publication_base_directory(self):
- if not loopback and publication_base is not None:
- return publication_base
- else:
- return self.path("publication")
+class allocation_db(list):
+ """
+ Allocation database.
+ """
+
+ def __init__(self, y):
+ list.__init__(self)
+ self.root = allocation(y, self)
+ assert self.root.is_root
+ if self.root.crl_interval is None:
+ self.root.crl_interval = 60 * 60
+ if self.root.regen_margin is None:
+ self.root.regen_margin = 24 * 60 * 60
+ if self.root.base.valid_until is None:
+ self.root.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 2)
+ for a in self:
+ if a.base.valid_until is None:
+ a.base.valid_until = a.parent.base.valid_until
+ if a.crl_interval is None:
+ a.crl_interval = a.parent.crl_interval
+ if a.regen_margin is None:
+ a.regen_margin = a.parent.regen_margin
+ self.root.closure()
+ self.map = dict((a.name, a) for a in self)
+ for a in self:
+ if a.is_hosted:
+ a.hosted_by = self.map[a.hosted_by]
+ a.hosted_by.hosts.append(a)
+ assert not a.is_root and not a.hosted_by.is_hosted
+
+ def dump(self):
+ for a in self:
+ a.dump()
- @property
- def publication_root_directory(self):
- if not loopback and publication_root is not None:
- return publication_root
- else:
- return self.path("publication.root")
-
- def dump_conf(self):
-
- r = dict(
- handle = self.name,
- run_rpkid = str(not self.is_hosted),
- run_pubd = str(self.runs_pubd),
- run_rootd = str(self.is_root),
- irdbd_sql_username = "irdb",
- rpkid_sql_username = "rpki",
- rpkid_server_host = self.hostname,
- rpkid_server_port = str(self.rpkid_port),
- irdbd_server_host = "localhost",
- irdbd_server_port = str(self.irdbd_port),
- rootd_server_port = str(self.rootd_port),
- pubd_sql_username = "pubd",
- pubd_server_host = self.pubd.hostname,
- pubd_server_port = str(self.pubd.pubd_port),
- publication_rsync_server = self.rsync_server)
-
- if loopback:
- r.update(
- irdbd_sql_database = self.irdb_name,
- rpkid_sql_database = "rpki%d" % self.engine,
- pubd_sql_database = "pubd%d" % self.engine,
- bpki_servers_directory = self.path(),
- publication_base_directory = self.publication_base_directory)
-
- r.update(config_overrides)
-
- with open(self.path("rpki.conf"), "w") as f:
- f.write("# Automatically generated, do not edit\n")
- if not quiet:
- print "Writing", f.name
-
- section = None
- for line in open(rpki_conf):
- m = section_regexp.match(line)
- if m:
- section = m.group(1)
- m = variable_regexp.match(line)
- option = m.group(1) if m and section == "myrpki" else None
- if option and option in r:
- line = "%s = %s\n" % (option, r[option])
- f.write(line)
-
- def dump_rsyncd(self):
- lines = []
- if self.runs_pubd:
- lines.extend((
- "# Automatically generated, do not edit",
- "port = %d" % self.rsync_port,
- "address = %s" % self.hostname,
- "log file = rsyncd.log",
- "read only = yes",
- "use chroot = no",
- "[rpki]",
- "path = %s" % self.publication_base_directory,
- "comment = RPKI test"))
- if self.is_root:
- assert self.runs_pubd
- lines.extend((
- "[root]",
- "path = %s" % self.publication_root_directory,
- "comment = RPKI test root"))
- if lines:
- with open(self.path("rsyncd.conf"), "w") as f:
- if not quiet:
- print "Writing", f.name
- f.writelines(line + "\n" for line in lines)
-
- @property
- def irdb_name(self):
- return "irdb%d" % self.host.engine
-
- @property
- def irdb(self):
- prior_name = self.zoo.handle
- return rpki.irdb.database(
- self.irdb_name,
- on_entry = lambda: self.zoo.reset_identity(self.name),
- on_exit = lambda: self.zoo.reset_identity(prior_name))
-
- def syncdb(self):
- import django.core.management
- assert not self.is_hosted
- django.core.management.call_command("syncdb",
- database = self.irdb_name,
- load_initial_data = False,
- interactive = False,
- verbosity = 0)
-
- def hire_zookeeper(self):
- assert not self.is_hosted
- self._zoo = rpki.irdb.Zookeeper(
- cfg = rpki.config.parser(self.path("rpki.conf")),
- logstream = None if quiet else sys.stdout)
-
- @property
- def zoo(self):
- return self.host._zoo
-
- def dump_root(self):
-
- assert self.is_root and not self.is_hosted
-
- root_resources = rpki.resource_set.resource_bag(
- asn = rpki.resource_set.resource_set_as("0-4294967295"),
- v4 = rpki.resource_set.resource_set_ipv4("0.0.0.0/0"),
- v6 = rpki.resource_set.resource_set_ipv6("::/0"))
-
- root_key = rpki.x509.RSA.generate(quiet = True)
-
- root_uri = "rsync://%s/rpki/" % self.rsync_server
-
- root_sia = (root_uri, root_uri + "root.mft", None)
-
- root_cert = rpki.x509.X509.self_certify(
- keypair = root_key,
- subject_key = root_key.get_public(),
- serial = 1,
- sia = root_sia,
- notAfter = rpki.sundial.now() + rpki.sundial.timedelta(days = 365),
- resources = root_resources)
-
- with open(self.path("publication.root", "root.cer"), "wb") as f:
- f.write(root_cert.get_DER())
-
- with open(self.path("root.key"), "wb") as f:
- f.write(root_key.get_DER())
-
- with open(cleanpath(test_dir, "root.tal"), "w") as f:
- f.write("rsync://%s/root/root.cer\n\n%s" % (
- self.rsync_server, root_key.get_public().get_Base64()))
-
- def mkdir(self, *path):
- path = self.path(*path)
- if not quiet:
- print "Creating directory", path
- os.makedirs(path)
- def dump_sql(self):
- if not self.is_hosted:
- with open(self.path("rpkid.sql"), "w") as f:
+class allocation(object):
+ """
+ One entity in our allocation database. Every entity in the database
+ is assumed to hold resources. Entities that don't have the
+ hosted_by property run their own copies of rpkid, irdbd, and pubd.
+ """
+
+ base_port = 4400
+ base_engine = -1
+ parent = None
+ crl_interval = None
+ regen_margin = None
+ engine = -1
+ rpkid_port = 4404
+ irdbd_port = 4403
+ pubd_port = 4402
+ rootd_port = 4401
+ rsync_port = 873
+
+ @classmethod
+ def allocate_port(cls):
+ cls.base_port += 1
+ return cls.base_port
+
+ @classmethod
+ def allocate_engine(cls):
+ cls.base_engine += 1
+ return cls.base_engine
+
+ def __init__(self, y, db, parent = None):
+ db.append(self)
+ self.name = y["name"]
+ self.parent = parent
+ self.kids = [allocation(k, db, self) for k in y.get("kids", ())]
+ valid_until = None
+ if "valid_until" in y:
+ valid_until = rpki.sundial.datetime.from_datetime(y.get("valid_until"))
+ if valid_until is None and "valid_for" in y:
+ valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(y["valid_for"])
+ self.base = rpki.resource_set.resource_bag(
+ asn = rpki.resource_set.resource_set_as(y.get("asn")),
+ v4 = rpki.resource_set.resource_set_ipv4(y.get("ipv4")),
+ v6 = rpki.resource_set.resource_set_ipv6(y.get("ipv6")),
+ valid_until = valid_until)
+ if "crl_interval" in y:
+ self.crl_interval = rpki.sundial.timedelta.parse(y["crl_interval"]).convert_to_seconds()
+ if "regen_margin" in y:
+ self.regen_margin = rpki.sundial.timedelta.parse(y["regen_margin"]).convert_to_seconds()
+ if "ghostbusters" in y:
+ self.ghostbusters = y.get("ghostbusters")
+ elif "ghostbuster" in y:
+ self.ghostbusters = [y.get("ghostbuster")]
+ else:
+ self.ghostbusters = []
+ self.roa_requests = [roa_request.parse(r) for r in y.get("roa_request", ())]
+ self.router_certs = [router_cert.parse(r) for r in y.get("router_cert", ())]
+ for r in self.roa_requests:
+ if r.v4:
+ self.base.v4 |= r.v4.to_resource_set()
+ if r.v6:
+ self.base.v6 |= r.v6.to_resource_set()
+ for r in self.router_certs:
+ self.base.asn |= r.asn
+ self.hosted_by = y.get("hosted_by")
+ self.hosts = []
+ if not self.is_hosted:
+ self.engine = self.allocate_engine()
+ if loopback and not self.is_hosted:
+ self.rpkid_port = self.allocate_port()
+ self.irdbd_port = self.allocate_port()
+ if loopback and self.runs_pubd:
+ self.pubd_port = self.allocate_port()
+ self.rsync_port = self.allocate_port()
+ if loopback and self.is_root:
+ self.rootd_port = self.allocate_port()
+
+ def closure(self):
+ resources = self.base
+ for kid in self.kids:
+ resources |= kid.closure()
+ self.resources = resources
+ return resources
+
+ @property
+ def hostname(self):
+ if loopback:
+ return "localhost"
+ elif dns_suffix:
+ return self.name + "." + dns_suffix.lstrip(".")
+ else:
+ return self.name
+
+ @property
+ def rsync_server(self):
+ if loopback:
+ return "%s:%s" % (self.pubd.hostname, self.pubd.rsync_port)
+ else:
+ return self.pubd.hostname
+
+ def dump(self):
if not quiet:
- print "Writing", f.name
- f.write(rpki.sql_schemas.rpkid)
- if self.runs_pubd:
- with open(self.path("pubd.sql"), "w") as f:
+ print str(self)
+
+ def __str__(self):
+ s = self.name + ":\n"
+ if self.resources.asn: s += " ASNs: %s\n" % self.resources.asn
+ if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4
+ if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6
+ if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids)
+ if self.parent: s += " Up: %s\n" % self.parent.name
+ if self.is_hosted: s += " Host: %s\n" % self.hosted_by.name
+ if self.hosts: s += " Hosts: %s\n" % ", ".join(h.name for h in self.hosts)
+ for r in self.roa_requests: s += " ROA: %s\n" % r
+ if not self.is_hosted: s += " IPort: %s\n" % self.irdbd_port
+ if self.runs_pubd: s += " PPort: %s\n" % self.pubd_port
+ if not self.is_hosted: s += " RPort: %s\n" % self.rpkid_port
+ if self.runs_pubd: s += " SPort: %s\n" % self.rsync_port
+ if self.is_root: s += " TPort: %s\n" % self.rootd_port
+ return s + " Until: %s\n" % self.resources.valid_until
+
+ @property
+ def is_root(self):
+ return self.parent is None
+
+ @property
+ def is_hosted(self):
+ return self.hosted_by is not None
+
+ @property
+ def runs_pubd(self):
+ return self.is_root or not (self.is_hosted or only_one_pubd)
+
+ def path(self, *names):
+ return cleanpath(test_dir, self.host.name, *names)
+
+ def csvout(self, fn):
+ path = self.path(fn)
if not quiet:
- print "Writing", f.name
- f.write(rpki.sql_schemas.pubd)
- if not self.is_hosted:
- username = config_overrides["irdbd_sql_username"]
- password = config_overrides["irdbd_sql_password"]
- cmd = ("mysqldump", "-u", username, "-p" + password, self.irdb_name)
- with open(self.path("irdbd.sql"), "w") as f:
+ print "Writing", path
+ return rpki.csv_utils.csv_writer(path)
+
+ def up_down_url(self):
+ return "http://%s:%d/up-down/%s/%s" % (self.parent.host.hostname,
+ self.parent.host.rpkid_port,
+ self.parent.name,
+ self.name)
+
+ def dump_asns(self, fn):
+ with self.csvout(fn) as f:
+ for k in self.kids:
+ f.writerows((k.name, a) for a in k.resources.asn)
+
+ def dump_prefixes(self, fn):
+ with self.csvout(fn) as f:
+ for k in self.kids:
+ f.writerows((k.name, p) for p in (k.resources.v4 + k.resources.v6))
+
+ def dump_roas(self, fn):
+ with self.csvout(fn) as f:
+ for g1, r in enumerate(self.roa_requests):
+ f.writerows((p, r.asn, "G%08d%08d" % (g1, g2))
+ for g2, p in enumerate((r.v4 + r.v6 if r.v4 and r.v6 else r.v4 or r.v6 or ())))
+
+ def dump_ghostbusters(self, fn):
+ if self.ghostbusters:
+ path = self.path(fn)
+ if not quiet:
+ print "Writing", path
+ with open(path, "w") as f:
+ for i, g in enumerate(self.ghostbusters):
+ if i > 0:
+ f.write("\n")
+ f.write(g)
+
+ def dump_router_certificates(self, fn):
+ if self.router_certs:
+ path = self.path(fn)
+ if not quiet:
+ print "Writing", path
+ xmlns = rpki.relaxng.router_certificate.xmlns
+ xml = lxml.etree.Element(xmlns + "router_certificate_requests",
+ version = rpki.relaxng.router_certificate.version,
+ nsmap = rpki.relaxng.router_certificate.nsmap)
+ for r in self.router_certs:
+ x = lxml.etree.SubElement(xml, xmlns + "router_certificate_request",
+ router_id = str(r.router_id),
+ asn = str(r.asn),
+ valid_until = str(self.resources.valid_until))
+ x.text = r.pkcs10.get_Base64()
+ rpki.relaxng.router_certificate.assertValid(xml)
+ lxml.etree.ElementTree(xml).write(path, pretty_print = True)
+
+ @property
+ def pubd(self):
+ s = self
+ while not s.runs_pubd:
+ s = s.parent
+ return s
+
+ @property
+ def client_handle(self):
+ path = []
+ s = self
+ if not flat_publication:
+ while not s.runs_pubd:
+ path.append(s)
+ s = s.parent
+ path.append(s)
+ return ".".join(i.name for i in reversed(path))
+
+ @property
+ def host(self):
+ return self.hosted_by or self
+
+ @property
+ def publication_base_directory(self):
+ if not loopback and publication_base is not None:
+ return publication_base
+ else:
+ return self.path("publication")
+
+ @property
+ def publication_root_directory(self):
+ if not loopback and publication_root is not None:
+ return publication_root
+ else:
+ return self.path("publication.root")
+
+ def dump_conf(self):
+
+ r = dict(
+ handle = self.name,
+ run_rpkid = str(not self.is_hosted),
+ run_pubd = str(self.runs_pubd),
+ run_rootd = str(self.is_root),
+ irdbd_sql_username = "irdb",
+ rpkid_sql_username = "rpki",
+ rpkid_server_host = self.hostname,
+ rpkid_server_port = str(self.rpkid_port),
+ irdbd_server_host = "localhost",
+ irdbd_server_port = str(self.irdbd_port),
+ rootd_server_port = str(self.rootd_port),
+ pubd_sql_username = "pubd",
+ pubd_server_host = self.pubd.hostname,
+ pubd_server_port = str(self.pubd.pubd_port),
+ publication_rsync_server = self.rsync_server)
+
+ if loopback:
+ r.update(
+ irdbd_sql_database = self.irdb_name,
+ rpkid_sql_database = "rpki%d" % self.engine,
+ pubd_sql_database = "pubd%d" % self.engine,
+ bpki_servers_directory = self.path(),
+ publication_base_directory = self.publication_base_directory)
+
+ r.update(config_overrides)
+
+ with open(self.path("rpki.conf"), "w") as f:
+ f.write("# Automatically generated, do not edit\n")
+ if not quiet:
+ print "Writing", f.name
+
+ section = None
+ for line in open(rpki_conf):
+ m = section_regexp.match(line)
+ if m:
+ section = m.group(1)
+ m = variable_regexp.match(line)
+ option = m.group(1) if m and section == "myrpki" else None
+ if option and option in r:
+ line = "%s = %s\n" % (option, r[option])
+ f.write(line)
+
+ def dump_rsyncd(self):
+ lines = []
+ if self.runs_pubd:
+ lines.extend((
+ "# Automatically generated, do not edit",
+ "port = %d" % self.rsync_port,
+ "address = %s" % self.hostname,
+ "log file = rsyncd.log",
+ "read only = yes",
+ "use chroot = no",
+ "[rpki]",
+ "path = %s" % self.publication_base_directory,
+ "comment = RPKI test"))
+ if self.is_root:
+ assert self.runs_pubd
+ lines.extend((
+ "[root]",
+ "path = %s" % self.publication_root_directory,
+ "comment = RPKI test root"))
+ if lines:
+ with open(self.path("rsyncd.conf"), "w") as f:
+ if not quiet:
+ print "Writing", f.name
+ f.writelines(line + "\n" for line in lines)
+
+ @property
+ def irdb_name(self):
+ return "irdb%d" % self.host.engine
+
+ @property
+ def irdb(self):
+ prior_name = self.zoo.handle
+ return rpki.irdb.database(
+ self.irdb_name,
+ on_entry = lambda: self.zoo.reset_identity(self.name),
+ on_exit = lambda: self.zoo.reset_identity(prior_name))
+
+ def syncdb(self):
+ import django.core.management
+ assert not self.is_hosted
+ django.core.management.call_command(
+ "syncdb",
+ verbosity = 0,
+ database = self.irdb_name,
+ migrate = True,
+ load_initial_data = False,
+ interactive = False)
+
+ def hire_zookeeper(self):
+ assert not self.is_hosted
+ self._zoo = rpki.irdb.Zookeeper(
+ cfg = rpki.config.parser(filename = self.path("rpki.conf")),
+ logstream = None if quiet else sys.stdout)
+
+ @property
+ def zoo(self):
+ return self.host._zoo
+
+ def dump_root(self):
+
+ assert self.is_root and not self.is_hosted
+
+ root_resources = rpki.resource_set.resource_bag(
+ asn = rpki.resource_set.resource_set_as("0-4294967295"),
+ v4 = rpki.resource_set.resource_set_ipv4("0.0.0.0/0"),
+ v6 = rpki.resource_set.resource_set_ipv6("::/0"))
+
+ root_key = rpki.x509.RSA.generate(quiet = True)
+
+ root_uri = "rsync://%s/rpki/" % self.rsync_server
+
+ root_sia = (root_uri, root_uri + "root.mft", None, rpki.publication.rrdp_sia_uri_kludge)
+
+ root_cert = rpki.x509.X509.self_certify(
+ keypair = root_key,
+ subject_key = root_key.get_public(),
+ serial = 1,
+ sia = root_sia,
+ notAfter = rpki.sundial.now() + rpki.sundial.timedelta(days = 365),
+ resources = root_resources)
+
+ with open(self.path("root.cer"), "wb") as f:
+ f.write(root_cert.get_DER())
+
+ with open(self.path("root.key"), "wb") as f:
+ f.write(root_key.get_DER())
+
+ with open(cleanpath(test_dir, "root.tal"), "w") as f:
+ f.write("rsync://%s/root/root.cer\n\n" % self.rsync_server)
+ f.write(root_key.get_public().get_Base64())
+
+ def mkdir(self, *path):
+ path = self.path(*path)
if not quiet:
- print "Writing", f.name
- subprocess.check_call(cmd, stdout = f)
+ print "Creating directory", path
+ os.makedirs(path)
+
+ def dump_sql(self):
+ if not self.is_hosted:
+ with open(self.path("rpkid.sql"), "w") as f:
+ if not quiet:
+ print "Writing", f.name
+ f.write(rpki.sql_schemas.rpkid)
+ if self.runs_pubd:
+ with open(self.path("pubd.sql"), "w") as f:
+ if not quiet:
+ print "Writing", f.name
+ f.write(rpki.sql_schemas.pubd)
+ if not self.is_hosted:
+ username = config_overrides["irdbd_sql_username"]
+ password = config_overrides["irdbd_sql_password"]
+ cmd = ("mysqldump", "-u", username, "-p" + password, self.irdb_name)
+ with open(self.path("irdbd.sql"), "w") as f:
+ if not quiet:
+ print "Writing", f.name
+ subprocess.check_call(cmd, stdout = f)
def pre_django_sql_setup(needed):
- username = config_overrides["irdbd_sql_username"]
- password = config_overrides["irdbd_sql_password"]
-
- # If we have the MySQL root password, just blow away and recreate
- # the required databases. Otherwise, check for missing databases,
- # then blow away all tables in the required databases. In either
- # case, we assume that the Django syncdb code will populate
- # databases as necessary, all we need to do here is provide empty
- # databases for the Django code to fill in.
+ username = config_overrides["irdbd_sql_username"]
+ password = config_overrides["irdbd_sql_password"]
+
+ # If we have the MySQL root password, just blow away and recreate
+ # the required databases. Otherwise, check for missing databases,
+ # then blow away all tables in the required databases. In either
+ # case, we assume that the Django syncdb code will populate
+ # databases as necessary, all we need to do here is provide empty
+ # databases for the Django code to fill in.
+
+ if mysql_rootpass is not None:
+ if mysql_rootpass:
+ db = MySQLdb.connect(user = mysql_rootuser, passwd = mysql_rootpass)
+ else:
+ db = MySQLdb.connect(user = mysql_rootuser)
+ cur = db.cursor()
+ for database in needed:
+ try:
+ cur.execute("DROP DATABASE IF EXISTS %s" % database)
+ except:
+ pass
+ cur.execute("CREATE DATABASE %s" % database)
+ cur.execute("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY %%s" % (
+ database, username), (password,))
- if mysql_rootpass is not None:
- if mysql_rootpass:
- db = MySQLdb.connect(user = mysql_rootuser, passwd = mysql_rootpass)
else:
- db = MySQLdb.connect(user = mysql_rootuser)
- cur = db.cursor()
- for database in needed:
- try:
- cur.execute("DROP DATABASE IF EXISTS %s" % database)
- except:
- pass
- cur.execute("CREATE DATABASE %s" % database)
- cur.execute("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY %%s" % (
- database, username), (password,))
-
- else:
- db = MySQLdb.connect(user = username, passwd = password)
- cur = db.cursor()
- cur.execute("SHOW DATABASES")
- existing = set(r[0] for r in cur.fetchall())
- if needed - existing:
- sys.stderr.write("The following databases are missing:\n")
- for database in sorted(needed - existing):
- sys.stderr.write(" %s\n" % database)
- sys.stderr.write("Please create them manually or put MySQL root password in my config file\n")
- sys.exit("Missing databases and MySQL root password not known, can't continue")
- for database in needed:
- db.select_db(database)
- cur.execute("SHOW TABLES")
- tables = [r[0] for r in cur.fetchall()]
- cur.execute("SET foreign_key_checks = 0")
- for table in tables:
- cur.execute("DROP TABLE %s" % table)
- cur.execute("SET foreign_key_checks = 1")
-
- cur.close()
- db.commit()
- db.close()
+ db = MySQLdb.connect(user = username, passwd = password)
+ cur = db.cursor()
+ cur.execute("SHOW DATABASES")
+ existing = set(r[0] for r in cur.fetchall())
+ if needed - existing:
+ sys.stderr.write("The following databases are missing:\n")
+ for database in sorted(needed - existing):
+ sys.stderr.write(" %s\n" % database)
+ sys.stderr.write("Please create them manually or put MySQL root password in my config file\n")
+ sys.exit("Missing databases and MySQL root password not known, can't continue")
+ for database in needed:
+ db.select_db(database)
+ cur.execute("SHOW TABLES")
+ tables = [r[0] for r in cur.fetchall()]
+ cur.execute("SET foreign_key_checks = 0")
+ for table in tables:
+ cur.execute("DROP TABLE %s" % table)
+ cur.execute("SET foreign_key_checks = 1")
+
+ cur.close()
+ db.commit()
+ db.close()
class timestamp(object):
- def __init__(self, *args):
- self.count = 0
- self.start = self.tick = rpki.sundial.now()
+ def __init__(self, *args):
+ self.count = 0
+ self.start = self.tick = rpki.sundial.now()
- def __call__(self, *args):
- now = rpki.sundial.now()
- if not quiet:
- print "[Count %s last %s total %s now %s]" % (
- self.count, now - self.tick, now - self.start, now)
- self.tick = now
- self.count += 1
+ def __call__(self, *args):
+ now = rpki.sundial.now()
+ if not quiet:
+ print "[Count %s last %s total %s now %s]" % (
+ self.count, now - self.tick, now - self.start, now)
+ self.tick = now
+ self.count += 1
def main():
- global flat_publication
- global config_overrides
- global only_one_pubd
- global loopback
- global dns_suffix
- global mysql_rootuser
- global mysql_rootpass
- global yaml_file
- global test_dir
- global rpki_conf
- global publication_base
- global publication_root
- global quiet
-
- os.environ["TZ"] = "UTC"
- time.tzset()
-
- parser = argparse.ArgumentParser(description = "yamlconf")
- parser.add_argument("-c", "--config", help = "configuration file")
- parser.add_argument("--dns_suffix",
- help = "DNS suffix to add to hostnames")
- parser.add_argument("-l", "--loopback", action = "store_true",
- help = "Configure for use with yamltest on localhost")
- parser.add_argument("-f", "--flat_publication", action = "store_true",
- help = "Use flat publication model")
- parser.add_argument("-q", "--quiet", action = "store_true",
- help = "Work more quietly")
- parser.add_argument("--profile",
- help = "Filename for profile output")
- parser.add_argument("yaml_file", type = argparse.FileType("r"),
- help = "YAML file describing network to build")
- args = parser.parse_args()
-
- dns_suffix = args.dns_suffix
- loopback = args.loopback
- flat_publication = args.flat_publication
- quiet = args.quiet
- yaml_file = args.yaml_file
-
- rpki.log.init("yamlconf", argparse.Namespace(log_level = logging.DEBUG,
- log_handler = lambda: logging.StreamHandler(sys.stdout)))
-
- # Allow optional config file for this tool to override default
- # passwords: this is mostly so that I can show a complete working
- # example without publishing my own server's passwords.
-
- cfg = rpki.config.parser(args.config, "yamlconf", allow_missing = True)
- try:
- cfg.set_global_flags()
- except:
- pass
-
- # Use of "yamltest.dir" is deliberate: intent is for what we write to
- # be usable with "yamltest --skip_config".
-
- only_one_pubd = cfg.getboolean("only_one_pubd", True)
- test_dir = cfg.get("test_directory", cleanpath(this_dir, "yamltest.dir"))
- rpki_conf = cfg.get("rpki_conf", cleanpath(this_dir, "..", "examples/rpki.conf"))
- mysql_rootuser = cfg.get("mysql_rootuser", "root")
-
- try:
- mysql_rootpass = cfg.get("mysql_rootpass")
- except:
- pass
-
- try:
- publication_base = cfg.get("publication_base")
- except:
- pass
-
- try:
- publication_root = cfg.get("publication_root")
- except:
- pass
-
- for k in ("rpkid_sql_password", "irdbd_sql_password", "pubd_sql_password",
- "rpkid_sql_username", "irdbd_sql_username", "pubd_sql_username"):
- if cfg.has_option(k):
- config_overrides[k] = cfg.get(k)
-
- if args.profile:
- import cProfile
- prof = cProfile.Profile()
+ global flat_publication
+ global config_overrides
+ global only_one_pubd
+ global loopback
+ global dns_suffix
+ global mysql_rootuser
+ global mysql_rootpass
+ global yaml_file
+ global test_dir
+ global rpki_conf
+ global publication_base
+ global publication_root
+ global quiet
+
+ os.environ["TZ"] = "UTC"
+ time.tzset()
+
+ parser = argparse.ArgumentParser(description = "yamlconf")
+ parser.add_argument("-c", "--config", help = "configuration file")
+ parser.add_argument("--dns_suffix",
+ help = "DNS suffix to add to hostnames")
+ parser.add_argument("-l", "--loopback", action = "store_true",
+ help = "Configure for use with yamltest on localhost")
+ parser.add_argument("-f", "--flat_publication", action = "store_true",
+ help = "Use flat publication model")
+ parser.add_argument("-q", "--quiet", action = "store_true",
+ help = "Work more quietly")
+ parser.add_argument("--profile",
+ help = "Filename for profile output")
+ parser.add_argument("yaml_file", type = argparse.FileType("r"),
+ help = "YAML file describing network to build")
+ args = parser.parse_args()
+
+ dns_suffix = args.dns_suffix
+ loopback = args.loopback
+ flat_publication = args.flat_publication
+ quiet = args.quiet
+ yaml_file = args.yaml_file
+
+ rpki.log.init("yamlconf", argparse.Namespace(log_level = logging.DEBUG,
+ log_handler = lambda: logging.StreamHandler(sys.stdout)))
+
+ # Allow optional config file for this tool to override default
+ # passwords: this is mostly so that I can show a complete working
+ # example without publishing my own server's passwords.
+
+ cfg = rpki.config.parser(set_filename = args.config, section = "yamlconf", allow_missing = True)
try:
- prof.runcall(body)
- finally:
- prof.dump_stats(args.profile)
- if not quiet:
- print
- print "Dumped profile data to %s" % args.profile
- else:
- body()
-
-def body():
+ cfg.set_global_flags()
+ except:
+ pass
- global rpki
+ # Use of "yamltest.dir" is deliberate: intent is for what we write to
+ # be usable with "yamltest --skip_config".
- ts = timestamp()
+ only_one_pubd = cfg.getboolean("only_one_pubd", True)
+ test_dir = cfg.get("test_directory", cleanpath(this_dir, "yamltest.dir"))
+ rpki_conf = cfg.get("rpki_conf", cleanpath(this_dir, "..", "examples/rpki.conf"))
+ mysql_rootuser = cfg.get("mysql_rootuser", "root")
- for root, dirs, files in os.walk(test_dir, topdown = False):
- for fn in files:
- os.unlink(os.path.join(root, fn))
- for d in dirs:
- os.rmdir(os.path.join(root, d))
+ try:
+ mysql_rootpass = cfg.get("mysql_rootpass")
+ except:
+ pass
- if not quiet:
- print
- print "Reading YAML", yaml_file.name
+ try:
+ publication_base = cfg.get("publication_base")
+ except:
+ pass
- db = allocation_db(yaml.safe_load_all(yaml_file).next())
+ try:
+ publication_root = cfg.get("publication_root")
+ except:
+ pass
- # Show what we loaded
+ for k in ("rpkid_sql_password", "irdbd_sql_password", "pubd_sql_password",
+ "rpkid_sql_username", "irdbd_sql_username", "pubd_sql_username"):
+ if cfg.has_option(k):
+ config_overrides[k] = cfg.get(k)
+
+ if args.profile:
+ import cProfile
+ prof = cProfile.Profile()
+ try:
+ prof.runcall(body)
+ finally:
+ prof.dump_stats(args.profile)
+ if not quiet:
+ print
+ print "Dumped profile data to %s" % args.profile
+ else:
+ body()
- #db.dump()
+def body():
- # Do pre-Django SQL setup
+ global rpki
- pre_django_sql_setup(set(d.irdb_name for d in db if not d.is_hosted))
+ ts = timestamp()
- # Now ready for fun with multiple databases in Django!
+ for root, dirs, files in os.walk(test_dir, topdown = False):
+ for fn in files:
+ os.unlink(os.path.join(root, fn))
+ for d in dirs:
+ os.rmdir(os.path.join(root, d))
- # https://docs.djangoproject.com/en/1.4/topics/db/multi-db/
- # https://docs.djangoproject.com/en/1.4/topics/db/sql/
+ if not quiet:
+ print
+ print "Reading YAML", yaml_file.name
- database_template = {
- "ENGINE" : "django.db.backends.mysql",
- "USER" : config_overrides["irdbd_sql_username"],
- "PASSWORD" : config_overrides["irdbd_sql_password"],
- "HOST" : "",
- "PORT" : "",
- "OPTIONS" : { "init_command": "SET storage_engine=INNODB" }}
+ db = allocation_db(yaml.safe_load_all(yaml_file).next())
- databases = dict((d.irdb_name,
- dict(database_template, NAME = d.irdb_name))
- for d in db if not d.is_hosted)
+ # Show what we loaded
- databases["default"] = databases[db.root.irdb_name]
+ #db.dump()
- import django
+ # Do pre-Django SQL setup
- from django.conf import settings
+ pre_django_sql_setup(set(d.irdb_name for d in db if not d.is_hosted))
- settings.configure(
- DATABASES = databases,
- DATABASE_ROUTERS = ["rpki.irdb.router.DBContextRouter"],
- MIDDLEWARE_CLASSES = (),
- INSTALLED_APPS = ("rpki.irdb",))
+ # Now ready for fun with multiple databases in Django!
+ #
+ # https://docs.djangoproject.com/en/1.4/topics/db/multi-db/
+ # https://docs.djangoproject.com/en/1.4/topics/db/sql/
+ #
+ # This program's use of the ORM is sufficiently different that it's
+ # not worth straining to use rpki.django_settings, so we just use
+ # Django's settings API directly.
- if django.VERSION >= (1, 7): # API change, feh
- from django.apps import apps
- apps.populate(settings.INSTALLED_APPS)
+ database_template = {
+ "ENGINE" : "django.db.backends.mysql",
+ "USER" : config_overrides["irdbd_sql_username"],
+ "PASSWORD" : config_overrides["irdbd_sql_password"],
+ "HOST" : "",
+ "PORT" : "",
+ "OPTIONS" : { "init_command": "SET storage_engine=INNODB" }}
- import rpki.irdb
+ databases = dict((d.irdb_name, dict(database_template, NAME = d.irdb_name))
+ for d in db if not d.is_hosted)
- rpki.irdb.models.ca_certificate_lifetime = rpki.sundial.timedelta(days = 3652 * 2)
- rpki.irdb.models.ee_certificate_lifetime = rpki.sundial.timedelta(days = 3652)
+ databases["default"] = databases[db.root.irdb_name]
- ts()
+ import django
+ django.setup()
- for d in db:
- if not quiet:
- print
- print "Configuring", d.name
-
- if not d.is_hosted:
- d.mkdir()
- if d.runs_pubd:
- d.mkdir("publication")
- if d.is_root:
- d.mkdir("publication.root")
-
- if not d.is_hosted:
- d.dump_conf()
- d.dump_rsyncd()
-
- d.dump_asns("%s.asns.csv" % d.name)
- d.dump_prefixes("%s.prefixes.csv" % d.name)
- d.dump_roas("%s.roas.csv" % d.name)
- d.dump_ghostbusters("%s.ghostbusters.vcard" % d.name)
- d.dump_router_certificates("%s.routercerts.xml" % d.name)
-
- if not d.is_hosted:
- if not quiet:
- print "Initializing SQL"
- d.syncdb()
- if not quiet:
- print "Hiring zookeeper"
- d.hire_zookeeper()
-
- with d.irdb:
- if not quiet:
- print "Creating identity"
- x = d.zoo.initialize()
-
- if d.is_root:
- if not quiet:
- print "Creating RPKI root certificate and TAL"
- d.dump_root()
- x = d.zoo.configure_rootd()
+ from django.conf import settings
- else:
- with d.parent.irdb:
- x = d.parent.zoo.configure_child(x.file)[0]
- x = d.zoo.configure_parent(x.file)[0]
+ settings.configure(
+ DATABASES = databases,
+ DATABASE_ROUTERS = ["rpki.irdb.router.DBContextRouter"],
+ INSTALLED_APPS = ["rpki.irdb"])
- with d.pubd.irdb:
- x = d.pubd.zoo.configure_publication_client(x.file, flat = flat_publication)[0]
- d.zoo.configure_repository(x.file)
+ import rpki.irdb
- if loopback and not d.is_hosted:
- with d.irdb:
- d.zoo.write_bpki_files()
+ rpki.irdb.models.ca_certificate_lifetime = rpki.sundial.timedelta(days = 3652 * 2)
+ rpki.irdb.models.ee_certificate_lifetime = rpki.sundial.timedelta(days = 3652)
ts()
- if not loopback:
- if not quiet:
- print
for d in db:
- d.dump_sql()
+ if not quiet:
+ print
+ print "Configuring", d.name
+
+ if not d.is_hosted:
+ d.mkdir()
+ if d.runs_pubd:
+ d.mkdir("publication")
+ if d.is_root:
+ d.mkdir("publication.root")
+
+ if not d.is_hosted:
+ d.dump_conf()
+ d.dump_rsyncd()
+
+ d.dump_asns("%s.asns.csv" % d.name)
+ d.dump_prefixes("%s.prefixes.csv" % d.name)
+ d.dump_roas("%s.roas.csv" % d.name)
+ d.dump_ghostbusters("%s.ghostbusters.vcard" % d.name)
+ d.dump_router_certificates("%s.routercerts.xml" % d.name)
+
+ if not d.is_hosted:
+ if not quiet:
+ print "Initializing SQL"
+ d.syncdb()
+ if not quiet:
+ print "Hiring zookeeper"
+ d.hire_zookeeper()
+
+ with d.irdb:
+ if not quiet:
+ print "Creating identity"
+ x = d.zoo.initialize()
+
+ if d.is_root:
+ if not quiet:
+ print "Creating RPKI root certificate and TAL"
+ d.dump_root()
+ x = d.zoo.configure_rootd()
+
+ else:
+ with d.parent.irdb:
+ x = d.parent.zoo.configure_child(x.file)[0]
+ x = d.zoo.configure_parent(x.file)[0]
+
+ with d.pubd.irdb:
+ x = d.pubd.zoo.configure_publication_client(x.file, flat = flat_publication)[0]
+ d.zoo.configure_repository(x.file)
+
+ if loopback and not d.is_hosted:
+ with d.irdb:
+ d.zoo.write_bpki_files()
+
+ ts()
+
+ if not loopback:
+ if not quiet:
+ print
+ for d in db:
+ d.dump_sql()
if __name__ == "__main__":
- main()
+ main()
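The rewritten main() and body() above read a YAML topology description and emit per-host rpki.conf, rsyncd.conf, CSV, and SQL files under yamltest.dir, intended to be usable with "yamltest.py --skip_config". A minimal invocation sketch based on the argparse options defined in main(); the smoketest.1.yaml, rpki.example.org, and yamlconf.prof names are illustrative placeholders, not files this change adds:

    # Loopback configuration for testing everything on localhost, with less chatter.
    python yamlconf.py --loopback --quiet smoketest.1.yaml

    # Non-loopback sketch: hostnames get the given DNS suffix appended,
    # and profiling data for the run is dumped to the named file.
    python yamlconf.py --dns_suffix rpki.example.org --profile yamlconf.prof smoketest.1.yaml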
diff --git a/ca/tests/yamltest-test-all.sh b/ca/tests/yamltest-test-all.sh
index 4bd5c560..54224d5e 100644
--- a/ca/tests/yamltest-test-all.sh
+++ b/ca/tests/yamltest-test-all.sh
@@ -24,24 +24,28 @@ test -z "$STY" && exec screen -L sh $0
screen -X split
screen -X focus
-: ${runtime=900}
+# Timers
+: ${startup=600} ${runtime=900} ${poll=30} ${shutdown=30}
+
+# Once upon a time we had a settitle program. Noop for now.
+: ${settitle=":"}
for yaml in smoketest.*.yaml
do
- settitle "$yaml: Starting"
+ $settitle "$yaml: Starting"
rm -rf test rcynic-data
python sql-cleaner.py
now=$(date +%s)
- finish=$(($now + $runtime))
+ finish=$(($now + $startup + $runtime))
title="$yaml: will finish at $(date -r $finish)"
- settitle "$title"
- screen sh -c "settitle '$title'; exec python yamltest.py -p yamltest.pid $yaml"
+ $settitle "$title"
+ screen sh -c "$settitle '$title'; exec python yamltest.py -p yamltest.pid $yaml"
date
- sleep 180
+ sleep $startup
date
while test $(date +%s) -lt $finish
do
- sleep 30
+ sleep $poll
date
../../rp/rcynic/rcynic
../../rp/rcynic/rcynic-text rcynic.xml
@@ -49,10 +53,8 @@ do
date
echo "$title"
done
- if test -r yamltest.pid
- then
- kill -INT $(cat yamltest.pid)
- sleep 30
- fi
+ if test -r yamltest.pid; then kill -INT $(cat yamltest.pid); sleep ${shutdown}; fi
+ if test -r yamltest.pid; then kill -INT $(cat yamltest.pid); sleep ${shutdown}; fi
+ if test -r yamltest.pid; then kill -KILL $(cat yamltest.pid); sleep ${shutdown}; fi
make backup
done
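The new timer variables use the ": ${name=default}" idiom, so the listed defaults (startup=600, runtime=900, poll=30, shutdown=30) apply only when the variables are unset, and a run can override them from the environment. A hypothetical example with illustrative values:

    # Shorter warm-up, poll, and shutdown intervals for a quick local run;
    # the script still re-execs itself under screen as before.
    startup=300 runtime=600 poll=15 shutdown=10 sh yamltest-test-all.sh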
diff --git a/ca/tests/yamltest.py b/ca/tests/yamltest.py
index 0932049b..a7ee6540 100644..100755
--- a/ca/tests/yamltest.py
+++ b/ca/tests/yamltest.py
@@ -36,13 +36,12 @@ and waits for one of them to exit.
# running daemons.
#
-# pylint: disable=W0702,W0621
-
import subprocess
import re
import os
import logging
import argparse
+import webbrowser
import sys
import yaml
import signal
@@ -56,6 +55,8 @@ import rpki.csv_utils
import rpki.x509
import rpki.relaxng
+# pylint: disable=W0621
+
# Nasty regular expressions for parsing config files. Sadly, while
# the Python ConfigParser supports writing config files, it does so in
# such a limited way that it's easier just to hack this ourselves.
@@ -64,585 +65,680 @@ section_regexp = re.compile(r"\s*\[\s*(.+?)\s*\]\s*$")
variable_regexp = re.compile(r"\s*([-a-zA-Z0-9_]+)\s*=\s*(.+?)\s*$")
def cleanpath(*names):
- """
- Construct normalized pathnames.
- """
- return os.path.normpath(os.path.join(*names))
+ """
+ Construct normalized pathnames.
+ """
+
+ return os.path.normpath(os.path.join(*names))
# Pathnames for various things we need
this_dir = os.getcwd()
test_dir = cleanpath(this_dir, "yamltest.dir")
-rpkid_dir = cleanpath(this_dir, "..")
+ca_dir = cleanpath(this_dir, "..")
-prog_rpkic = cleanpath(rpkid_dir, "rpkic")
-prog_rpkid = cleanpath(rpkid_dir, "rpkid")
-prog_irdbd = cleanpath(rpkid_dir, "irdbd")
-prog_pubd = cleanpath(rpkid_dir, "pubd")
-prog_rootd = cleanpath(rpkid_dir, "rootd")
+prog_rpkic = cleanpath(ca_dir, "rpkic")
+prog_rpkid = cleanpath(ca_dir, "rpkid")
+prog_irdbd = cleanpath(ca_dir, "irdbd")
+prog_pubd = cleanpath(ca_dir, "pubd")
+prog_rootd = cleanpath(ca_dir, "rootd")
+prog_rpki_manage = cleanpath(ca_dir, "rpki-manage")
class roa_request(object):
- """
- Representation of a ROA request.
- """
-
- def __init__(self, asn, ipv4, ipv6):
- self.asn = asn
- self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None
- self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None
-
- def __eq__(self, other):
- return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6
-
- def __hash__(self):
- v4 = tuple(self.v4) if self.v4 is not None else None
- v6 = tuple(self.v6) if self.v6 is not None else None
- return self.asn.__hash__() + v4.__hash__() + v6.__hash__()
-
- def __str__(self):
- if self.v4 and self.v6:
- return "%s: %s,%s" % (self.asn, self.v4, self.v6)
- else:
- return "%s: %s" % (self.asn, self.v4 or self.v6)
-
- @classmethod
- def parse(cls, y):
"""
- Parse a ROA request from YAML format.
+ Representation of a ROA request.
"""
- return cls(y.get("asn"), y.get("ipv4"), y.get("ipv6"))
+ def __init__(self, asn, ipv4, ipv6):
+ self.asn = asn
+ self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None
+ self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None
-class router_cert(object):
- """
- Representation for a router_cert object.
- """
-
- _ecparams = None
-
- @classmethod
- def ecparams(cls):
- if cls._ecparams is None:
- cls._ecparams = rpki.x509.KeyParams.generateEC()
- return cls._ecparams
+ def __eq__(self, other):
+ return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6
- def __init__(self, asn, router_id):
- self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split()))
- self.router_id = router_id
- self.keypair = rpki.x509.ECDSA.generate(self.ecparams())
- self.pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair)
- self.gski = self.pkcs10.gSKI()
+ def __hash__(self):
+ v4 = tuple(self.v4) if self.v4 is not None else None
+ v6 = tuple(self.v6) if self.v6 is not None else None
+ return self.asn.__hash__() + v4.__hash__() + v6.__hash__()
- def __eq__(self, other):
- return self.asn == other.asn and self.router_id == other.router_id and self.gski == other.gski
+ def __str__(self):
+ if self.v4 and self.v6:
+ return "%s: %s,%s" % (self.asn, self.v4, self.v6)
+ else:
+ return "%s: %s" % (self.asn, self.v4 or self.v6)
- def __hash__(self):
- return tuple(self.asn).__hash__() + self.router_id.__hash__() + self.gski.__hash__()
+ @classmethod
+ def parse(cls, y):
+ """
+ Parse a ROA request from YAML format.
+ """
- def __str__(self):
- return "%s: %s: %s" % (self.asn, self.router_id, self.gski)
+ return cls(y.get("asn"), y.get("ipv4"), y.get("ipv6"))
- @classmethod
- def parse(cls, yaml):
- return cls(yaml.get("asn"), yaml.get("router_id"))
-class allocation_db(list):
- """
- Our allocation database.
- """
-
- def __init__(self, yaml):
- list.__init__(self)
- self.root = allocation(yaml, self)
- assert self.root.is_root
- if self.root.crl_interval is None:
- self.root.crl_interval = 60 * 60
- if self.root.regen_margin is None:
- self.root.regen_margin = 24 * 60 * 60
- if self.root.base.valid_until is None:
- self.root.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 2)
- for a in self:
- if a.base.valid_until is None:
- a.base.valid_until = a.parent.base.valid_until
- if a.crl_interval is None:
- a.crl_interval = a.parent.crl_interval
- if a.regen_margin is None:
- a.regen_margin = a.parent.regen_margin
- self.root.closure()
- self.map = dict((a.name, a) for a in self)
- for a in self:
- if a.is_hosted:
- a.hosted_by = self.map[a.hosted_by]
- a.hosted_by.hosts.append(a)
- assert not a.is_root and not a.hosted_by.is_hosted
-
- def dump(self):
+class router_cert(object):
"""
- Show contents of allocation database.
+ Representation for a router_cert object.
"""
- for a in self:
- a.dump()
+ _ecparams = None
-class allocation(object):
- """
- One entity in our allocation database. Every entity in the database
- is assumed to hold resources, so needs at least rpkic services.
- Entities that don't have the hosted_by property run their own copies
- of rpkid, irdbd, and pubd, so they also need myirbe services.
- """
-
- base_port = None
- parent = None
- crl_interval = None
- regen_margin = None
- rootd_port = None
- engine = -1
- rpkid_port = -1
- irdbd_port = -1
- pubd_port = -1
- rsync_port = -1
- rootd_port = -1
- rpkic_counter = 0L
-
- @classmethod
- def allocate_port(cls):
- """
- Allocate a TCP port.
- """
- cls.base_port += 1
- return cls.base_port
+ @classmethod
+ def ecparams(cls):
+ if cls._ecparams is None:
+ cls._ecparams = rpki.x509.KeyParams.generateEC()
+ return cls._ecparams
- base_engine = -1
+ def __init__(self, asn, router_id):
+ self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split()))
+ self.router_id = router_id
+ self.keypair = rpki.x509.ECDSA.generate(params = self.ecparams(), quiet = True)
+ self.pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair)
+ self.gski = self.pkcs10.gSKI()
- @classmethod
- def allocate_engine(cls):
- """
- Allocate an engine number, mostly used to construct MySQL database
- names.
- """
- cls.base_engine += 1
- return cls.base_engine
-
- def __init__(self, yaml, db, parent = None):
- db.append(self)
- self.name = yaml["name"]
- self.parent = parent
- self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())]
- valid_until = None
- if "valid_until" in yaml:
- valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until"))
- if valid_until is None and "valid_for" in yaml:
- valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"])
- self.base = rpki.resource_set.resource_bag(
- asn = rpki.resource_set.resource_set_as(yaml.get("asn")),
- v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")),
- v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")),
- valid_until = valid_until)
- if "crl_interval" in yaml:
- self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds()
- if "regen_margin" in yaml:
- self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds()
- self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))]
- self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())]
- if "ghostbusters" in yaml:
- self.ghostbusters = yaml.get("ghostbusters")
- elif "ghostbuster" in yaml:
- self.ghostbusters = [yaml.get("ghostbuster")]
- else:
- self.ghostbusters = []
- for r in self.roa_requests:
- if r.v4:
- self.base.v4 |= r.v4.to_resource_set()
- if r.v6:
- self.base.v6 |= r.v6.to_resource_set()
- for r in self.router_certs:
- self.base.asn |= r.asn
- self.hosted_by = yaml.get("hosted_by")
- self.hosts = []
- if not self.is_hosted:
- self.engine = self.allocate_engine()
- self.rpkid_port = self.allocate_port()
- self.irdbd_port = self.allocate_port()
- if self.runs_pubd:
- self.pubd_port = self.allocate_port()
- self.rsync_port = self.allocate_port()
- if self.is_root:
- self.rootd_port = self.allocate_port()
-
- def closure(self):
- """
- Compute resource closure of this node and its children, to avoid a
- lot of tedious (and error-prone) duplication in the YAML file.
- """
- resources = self.base
- for kid in self.kids:
- resources |= kid.closure()
- self.resources = resources
- return resources
+ def __eq__(self, other):
+ return self.asn == other.asn and self.router_id == other.router_id and self.gski == other.gski
- def dump(self):
- """
- Show content of this allocation node.
- """
- print str(self)
-
- def __str__(self):
- s = self.name + ":\n"
- if self.resources.asn: s += " ASNs: %s\n" % self.resources.asn
- if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4
- if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6
- if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids)
- if self.parent: s += " Up: %s\n" % self.parent.name
- if self.is_hosted: s += " Host: %s\n" % self.hosted_by.name
- if self.hosts: s += " Hosts: %s\n" % ", ".join(h.name for h in self.hosts)
- for r in self.roa_requests: s += " ROA: %s\n" % r
- if not self.is_hosted: s += " IPort: %s\n" % self.irdbd_port
- if self.runs_pubd: s += " PPort: %s\n" % self.pubd_port
- if not self.is_hosted: s += " RPort: %s\n" % self.rpkid_port
- if self.runs_pubd: s += " SPort: %s\n" % self.rsync_port
- if self.is_root: s += " TPort: %s\n" % self.rootd_port
- return s + " Until: %s\n" % self.resources.valid_until
-
- @property
- def is_root(self):
- """
- Is this the root node?
- """
- return self.parent is None
+ def __hash__(self):
+ return tuple(self.asn).__hash__() + self.router_id.__hash__() + self.gski.__hash__()
- @property
- def is_hosted(self):
- """
- Is this entity hosted?
- """
- return self.hosted_by is not None
+ def __str__(self):
+ return "%s: %s: %s" % (self.asn, self.router_id, self.gski)
- @property
- def runs_pubd(self):
- """
- Does this entity run a pubd?
- """
- return self.is_root or not (self.is_hosted or only_one_pubd)
+ @classmethod
+ def parse(cls, yaml):
+ return cls(yaml.get("asn"), yaml.get("router_id"))
- def path(self, *names):
+class allocation_db(list):
"""
- Construct pathnames in this entity's test directory.
+ Our allocation database.
"""
- return cleanpath(test_dir, self.host.name, *names)
- def csvout(self, fn):
- """
- Open and log a CSV output file.
- """
- path = self.path(fn)
- print "Writing", path
- return rpki.csv_utils.csv_writer(path)
+ def __init__(self, yaml):
+ list.__init__(self)
+ self.root = allocation(yaml, self)
+ assert self.root.is_root and not any(a.is_root for a in self if a is not self.root) and self[0] is self.root
+ if self.root.crl_interval is None:
+ self.root.crl_interval = 60 * 60
+ if self.root.regen_margin is None:
+ self.root.regen_margin = 24 * 60 * 60
+ if self.root.base.valid_until is None:
+ self.root.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 2)
+ for a in self:
+ if a.base.valid_until is None:
+ a.base.valid_until = a.parent.base.valid_until
+ if a.crl_interval is None:
+ a.crl_interval = a.parent.crl_interval
+ if a.regen_margin is None:
+ a.regen_margin = a.parent.regen_margin
+ self.root.closure()
+ self.map = dict((a.name, a) for a in self)
+ for a in self:
+ if a.is_hosted:
+ a.hosted_by = self.map[a.hosted_by]
+ a.hosted_by.hosts.append(a)
+ assert not a.is_root and not a.hosted_by.is_hosted
+
+ def dump(self):
+ """
+ Show contents of allocation database.
+ """
+
+ for a in self:
+ a.dump()
- def up_down_url(self):
- """
- Construct service URL for this node's parent.
- """
- return "http://localhost:%d/up-down/%s/%s" % (self.parent.host.rpkid_port,
- self.parent.name,
- self.name)
- def dump_asns(self):
- """
- Write Autonomous System Numbers CSV file.
- """
- fn = "%s.asns.csv" % d.name
- if not args.skip_config:
- f = self.csvout(fn)
- for k in self.kids:
- f.writerows((k.name, a) for a in k.resources.asn)
- f.close()
- if not args.stop_after_config:
- self.run_rpkic("load_asns", fn)
-
- def dump_prefixes(self):
- """
- Write prefixes CSV file.
- """
- fn = "%s.prefixes.csv" % d.name
- if not args.skip_config:
- f = self.csvout(fn)
- for k in self.kids:
- f.writerows((k.name, p) for p in (k.resources.v4 + k.resources.v6))
- f.close()
- if not args.stop_after_config:
- self.run_rpkic("load_prefixes", fn)
-
- def dump_roas(self):
- """
- Write ROA CSV file.
- """
- fn = "%s.roas.csv" % d.name
- if not args.skip_config:
- f = self.csvout(fn)
- for g1, r in enumerate(self.roa_requests):
- f.writerows((p, r.asn, "G%08d%08d" % (g1, g2))
- for g2, p in enumerate((r.v4 + r.v6 if r.v4 and r.v6 else r.v4 or r.v6 or ())))
- f.close()
- if not args.stop_after_config:
- self.run_rpkic("load_roa_requests", fn)
-
- def dump_ghostbusters(self):
- """
- Write Ghostbusters vCard file.
- """
- if self.ghostbusters:
- fn = "%s.ghostbusters.vcard" % d.name
- if not args.skip_config:
- path = self.path(fn)
- print "Writing", path
- f = open(path, "w")
- for i, g in enumerate(self.ghostbusters):
- if i:
- f.write("\n")
- f.write(g)
- f.close()
- if not args.stop_after_config:
- self.run_rpkic("load_ghostbuster_requests", fn)
-
- def dump_router_certificates(self):
+class allocation(object):
"""
- Write EE certificates (router certificates, etc).
+ One entity in our allocation database. Every entity in the database
+ is assumed to hold resources, so needs at least rpkic services.
+ Entities that don't have the hosted_by property run their own copies
+ of rpkid, irdbd, and pubd, so they also need myirbe services.
"""
- if self.router_certs:
- fn = "%s.routercerts.xml" % d.name
- if not args.skip_config:
- path = self.path(fn)
- print "Writing", path
- xmlns = rpki.relaxng.router_certificate.xmlns
- xml = lxml.etree.Element(xmlns + "router_certificate_requests",
- version = rpki.relaxng.router_certificate.version,
- nsmap = rpki.relaxng.router_certificate.nsmap)
- for r in self.router_certs:
- x = lxml.etree.SubElement(xml, xmlns + "router_certificate_request",
- router_id = str(r.router_id),
- asn = str(r.asn),
- valid_until = str(self.resources.valid_until))
- x.text = r.pkcs10.get_Base64()
- rpki.relaxng.router_certificate.assertValid(xml)
- lxml.etree.ElementTree(xml).write(path, pretty_print = True)
- if not args.stop_after_config:
- self.run_rpkic("add_router_certificate_request", fn)
- if not args.skip_config and args.store_router_private_keys:
- path = self.path("%s.routercerts.keys" % d.name)
- print "Writing", path
- with open(path, "w") as f:
- for r in self.router_certs:
- f.write(r.keypair.get_PEM())
- @property
- def pubd(self):
- """
- Walk up tree until we find somebody who runs pubd.
- """
- s = self
- while not s.runs_pubd:
- s = s.parent
- return s
+ base_port = None
+ parent = None
+ crl_interval = None
+ regen_margin = None
+ rootd_port = None
+ engine = -1
+ rpkid_port = -1
+ irdbd_port = -1
+ pubd_port = -1
+ rsync_port = -1
+ rootd_port = -1
+ rrdp_port = -1
+ rpkic_counter = 0L
+
+ @classmethod
+ def allocate_port(cls):
+ """
+ Allocate a TCP port.
+ """
+
+ cls.base_port += 1
+ return cls.base_port
+
+ base_engine = -1
+
+ @classmethod
+ def allocate_engine(cls):
+ """
+ Allocate an engine number, mostly used to construct SQL database
+ names.
+ """
+
+ cls.base_engine += 1
+ return cls.base_engine
+
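Both allocators are plain class-level counters: each non-hosted entity gets a strictly increasing block of TCP ports counted up from base_port (which defaults to 4400 unless overridden in the optional yamltest config section read further down), plus an engine number counted up from 0 that is used for per-entity SQL database names and GUI ports. A minimal sketch of the same counter idiom, using a hypothetical PortPool class rather than the allocation class itself:

class PortPool(object):
    # Hypothetical stand-in for allocation.allocate_port(); 4400 mirrors the
    # base_port default used by this script.
    base_port = 4400

    @classmethod
    def allocate_port(cls):
        cls.base_port += 1
        return cls.base_port

assert PortPool.allocate_port() == 4401
assert PortPool.allocate_port() == 4402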
+ def __init__(self, yaml, db, parent = None):
+ db.append(self)
+ self.name = yaml["name"]
+ self.parent = parent
+ self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())]
+ valid_until = None
+ if "valid_until" in yaml:
+ valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until"))
+ if valid_until is None and "valid_for" in yaml:
+ valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"])
+ self.base = rpki.resource_set.resource_bag(
+ asn = rpki.resource_set.resource_set_as(yaml.get("asn")),
+ v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")),
+ v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")),
+ valid_until = valid_until)
+ if "crl_interval" in yaml:
+ self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds()
+ if "regen_margin" in yaml:
+ self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds()
+ self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))]
+ self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())]
+ if "ghostbusters" in yaml:
+ self.ghostbusters = yaml.get("ghostbusters")
+ elif "ghostbuster" in yaml:
+ self.ghostbusters = [yaml.get("ghostbuster")]
+ else:
+ self.ghostbusters = []
+ for r in self.roa_requests:
+ if r.v4:
+ self.base.v4 |= r.v4.to_resource_set()
+ if r.v6:
+ self.base.v6 |= r.v6.to_resource_set()
+ for r in self.router_certs:
+ self.base.asn |= r.asn
+ self.hosted_by = yaml.get("hosted_by")
+ self.hosts = []
+ if not self.is_hosted:
+ self.engine = self.allocate_engine()
+ self.rpkid_port = self.allocate_port()
+ self.irdbd_port = self.allocate_port()
+ if self.runs_pubd:
+ self.pubd_port = self.allocate_port()
+ self.rsync_port = self.allocate_port()
+ self.rrdp_port = self.allocate_port()
+ if self.is_root:
+ self.rootd_port = self.allocate_port()
+
+ def closure(self):
+ """
+ Compute resource closure of this node and its children, to avoid a
+ lot of tedious (and error-prone) duplication in the YAML file.
+ """
+
+ resources = self.base
+ for kid in self.kids:
+ resources |= kid.closure()
+ self.resources = resources # pylint: disable=W0201
+ return resources
+
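closure() is a straightforward bottom-up union: a node's effective resources are its own base plus everything held by its descendants, so the YAML only has to spell out allocations at the level where they are introduced. A minimal sketch of the same recursion over a hypothetical Node type holding plain frozensets instead of rpki.resource_set bags:

class Node(object):
    # Hypothetical stand-in for allocation: 'base' is what the YAML assigns
    # directly, 'kids' are child nodes, 'resources' is filled in by closure().
    def __init__(self, base, kids = ()):
        self.base = base
        self.kids = list(kids)

    def closure(self):
        resources = self.base
        for kid in self.kids:
            resources |= kid.closure()
        self.resources = resources
        return resources

leaf = Node(frozenset(["AS64496", "10.0.0.0/24"]))
root = Node(frozenset(["AS64511"]), kids = [leaf])
assert root.closure() == frozenset(["AS64496", "AS64511", "10.0.0.0/24"])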
+ def dump(self):
+ """
+ Show content of this allocation node.
+ """
+
+ print str(self)
+
+ def __str__(self):
+ # pylint: disable=C0321
+ s = self.name + ":\n"
+ if self.resources.asn: s += " ASNs: %s\n" % self.resources.asn
+ if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4
+ if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6
+ if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids)
+ if self.parent: s += " Up: %s\n" % self.parent.name
+ if self.is_hosted: s += " Host: %s\n" % self.hosted_by.name
+ if self.hosts: s += " Hosts: %s\n" % ", ".join(h.name for h in self.hosts)
+ for r in self.roa_requests: s += " ROA: %s\n" % r
+ if not self.is_hosted: s += " IPort: %s\n" % self.irdbd_port
+ if self.runs_pubd: s += " PPort: %s\n" % self.pubd_port
+ if not self.is_hosted: s += " RPort: %s\n" % self.rpkid_port
+ if self.runs_pubd: s += " SPort: %s\n" % self.rsync_port
+ if self.is_root: s += " TPort: %s\n" % self.rootd_port
+ return s + " Until: %s\n" % self.resources.valid_until
+
+ @property
+ def is_root(self):
+ """
+ Is this the root node?
+ """
+
+ return self.parent is None
+
+ @property
+ def is_hosted(self):
+ """
+ Is this entity hosted?
+ """
+
+ return self.hosted_by is not None
+
+ @property
+ def runs_pubd(self):
+ """
+ Does this entity run a pubd?
+ """
+
+ return self.is_root or not (self.is_hosted or only_one_pubd)
+
+ def path(self, *names):
+ """
+ Construct pathnames in this entity's test directory.
+ """
+
+ return cleanpath(test_dir, self.host.name, *names)
+
+ def csvout(self, fn):
+ """
+ Open and log a CSV output file.
+ """
- @property
- def client_handle(self):
- """
- Work out what pubd configure_publication_client will call us.
- """
- path = []
- s = self
- if not args.flat_publication:
- while not s.runs_pubd:
+ path = self.path(fn)
+ print "Writing", path
+ return rpki.csv_utils.csv_writer(path)
+
+ def up_down_url(self):
+ """
+ Construct service URL for this node's parent.
+ """
+
+ return "http://localhost:%d/up-down/%s/%s" % (self.parent.host.rpkid_port,
+ self.parent.name,
+ self.name)
+
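The up-down service URL always points at the rpkid instance hosting the parent, with the parent and child handles as the last two path components. For a hypothetical child LIR1 whose parent RIR is served by an rpkid listening on port 4402, the method would yield:

# Hypothetical handles and port, evaluated the same way up_down_url() does:
print "http://localhost:%d/up-down/%s/%s" % (4402, "RIR", "LIR1")
# -> http://localhost:4402/up-down/RIR/LIR1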
+ def dump_asns(self):
+ """
+ Write Autonomous System Numbers CSV file.
+ """
+
+ fn = "%s.asns.csv" % self.name
+ if not args.skip_config:
+ with self.csvout(fn) as f:
+ for k in self.kids:
+ f.writerows((k.name, a) for a in k.resources.asn)
+ if not args.stop_after_config:
+ self.run_rpkic("load_asns", fn)
+
+ def dump_prefixes(self):
+ """
+ Write prefixes CSV file.
+ """
+
+ fn = "%s.prefixes.csv" % self.name
+ if not args.skip_config:
+ with self.csvout(fn) as f:
+ for k in self.kids:
+ f.writerows((k.name, p) for p in (k.resources.v4 + k.resources.v6))
+ if not args.stop_after_config:
+ self.run_rpkic("load_prefixes", fn)
+
+ def dump_roas(self):
+ """
+ Write ROA CSV file.
+ """
+
+ fn = "%s.roas.csv" % self.name
+ if not args.skip_config:
+ with self.csvout(fn) as f:
+ for g1, r in enumerate(self.roa_requests):
+ f.writerows((p, r.asn, "G%08d%08d" % (g1, g2))
+ for g2, p in enumerate((r.v4 + r.v6 if r.v4 and r.v6 else r.v4 or r.v6 or ())))
+ if not args.stop_after_config:
+ self.run_rpkic("load_roa_requests", fn)
+
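Each ROA request is flattened into one CSV row per prefix, and the "G%08d%08d" group tag ties all prefixes of a single request together so that load_roa_requests can reassemble them into one ROA request. A hypothetical request number 0 for AS 64496 covering two prefixes would produce:

# Hypothetical example of the group-tag construction used above:
prefixes = ["10.0.0.0/24", "2001:db8::/32"]
rows = [(p, 64496, "G%08d%08d" % (0, g2)) for g2, p in enumerate(prefixes)]
assert rows == [("10.0.0.0/24",   64496, "G0000000000000000"),
                ("2001:db8::/32", 64496, "G0000000000000001")]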
+ def dump_ghostbusters(self):
+ """
+ Write Ghostbusters vCard file.
+ """
+
+ if self.ghostbusters:
+ fn = "%s.ghostbusters.vcard" % self.name
+ if not args.skip_config:
+ path = self.path(fn)
+ print "Writing", path
+ with open(path, "w") as f:
+ f.write("\n".join(self.ghostbusters))
+ if not args.stop_after_config:
+ self.run_rpkic("load_ghostbuster_requests", fn)
+
+ def dump_router_certificates(self):
+ """
+ Write EE certificates (router certificates, etc).
+ """
+
+ if self.router_certs:
+ fn = "%s.routercerts.xml" % self.name
+ if not args.skip_config:
+ path = self.path(fn)
+ print "Writing", path
+ xmlns = rpki.relaxng.router_certificate.xmlns
+ xml = lxml.etree.Element(xmlns + "router_certificate_requests",
+ version = rpki.relaxng.router_certificate.version,
+ nsmap = rpki.relaxng.router_certificate.nsmap)
+ for r in self.router_certs:
+ x = lxml.etree.SubElement(xml, xmlns + "router_certificate_request",
+ router_id = str(r.router_id),
+ asn = str(r.asn),
+ valid_until = str(self.resources.valid_until))
+ x.text = r.pkcs10.get_Base64()
+ rpki.relaxng.router_certificate.assertValid(xml)
+ lxml.etree.ElementTree(xml).write(path, pretty_print = True)
+ if not args.stop_after_config:
+ self.run_rpkic("add_router_certificate_request", fn)
+ if not args.skip_config and args.store_router_private_keys:
+ path = self.path("%s.routercerts.keys" % self.name)
+ print "Writing", path
+ with open(path, "w") as f:
+ for r in self.router_certs:
+ f.write(r.keypair.get_PEM())
+
+ @property
+ def pubd(self):
+ """
+ Walk up tree until we find somebody who runs pubd.
+ """
+
+ s = self
+ while not s.runs_pubd:
+ s = s.parent
+ return s
+
+ @property
+ def client_handle(self):
+ """
+ Work out what pubd configure_publication_client will call us.
+ """
+
+ path = []
+ s = self
+ if not args.flat_publication:
+ while not s.runs_pubd:
+ path.append(s)
+ s = s.parent
path.append(s)
- s = s.parent
- path.append(s)
- return ".".join(i.name for i in reversed(path))
-
- @property
- def host(self):
- return self.hosted_by or self
-
- def dump_conf(self):
- """
- Write configuration file for OpenSSL and RPKI tools.
- """
-
- r = dict(
- handle = self.name,
- run_rpkid = str(not self.is_hosted),
- run_pubd = str(self.runs_pubd),
- run_rootd = str(self.is_root),
- irdbd_sql_database = "irdb%d" % self.engine,
- irdbd_sql_username = "irdb",
- rpkid_sql_database = "rpki%d" % self.engine,
- rpkid_sql_username = "rpki",
- rpkid_server_host = "localhost",
- rpkid_server_port = str(self.rpkid_port),
- irdbd_server_host = "localhost",
- irdbd_server_port = str(self.irdbd_port),
- rootd_server_port = str(self.rootd_port),
- pubd_sql_database = "pubd%d" % self.engine,
- pubd_sql_username = "pubd",
- pubd_server_host = "localhost",
- pubd_server_port = str(self.pubd.pubd_port),
- publication_rsync_server = "localhost:%s" % self.pubd.rsync_port,
- bpki_servers_directory = self.path(),
- publication_base_directory = self.path("publication"),
- shared_sql_password = "fnord")
-
- r.update(config_overrides)
-
- f = open(self.path("rpki.conf"), "w")
- f.write("# Automatically generated, do not edit\n")
- print "Writing", f.name
-
- section = None
- for line in open(cleanpath(rpkid_dir, "examples/rpki.conf")):
- m = section_regexp.match(line)
- if m:
- section = m.group(1)
- m = variable_regexp.match(line)
- option = m.group(1) if m and section == "myrpki" else None
- if option and option in r:
- line = "%s = %s\n" % (option, r[option])
- f.write(line)
-
- f.close()
-
- def dump_rsyncd(self):
- """
- Write rsyncd configuration file.
- """
-
- if self.runs_pubd:
- f = open(self.path("rsyncd.conf"), "w")
- print "Writing", f.name
- f.writelines(s + "\n" for s in
- ("# Automatically generated, do not edit",
- "port = %d" % self.rsync_port,
- "address = localhost",
- "[rpki]",
- "log file = rsyncd.log",
- "read only = yes",
- "use chroot = no",
- "path = %s" % self.path("publication"),
- "comment = RPKI test",
- "[root]",
- "log file = rsyncd_root.log",
- "read only = yes",
- "use chroot = no",
- "path = %s" % self.path("publication.root"),
- "comment = RPKI test root"))
- f.close()
-
- @classmethod
- def next_rpkic_counter(cls):
- cls.rpkic_counter += 10000
- return str(cls.rpkic_counter)
-
- def run_rpkic(self, *argv):
- """
- Run rpkic for this entity.
- """
- cmd = [prog_rpkic, "-i", self.name, "-c", self.path("rpki.conf")]
- if args.profile:
- cmd.append("--profile")
- cmd.append(self.path("rpkic.%s.prof" % rpki.sundial.now()))
- cmd.extend(str(a) for a in argv if a is not None)
- print 'Running "%s"' % " ".join(cmd)
- env = os.environ.copy()
- env["YAMLTEST_RPKIC_COUNTER"] = self.next_rpkic_counter()
- subprocess.check_call(cmd, cwd = self.host.path(), env = env)
-
- def run_python_daemon(self, prog):
- """
- Start a Python daemon and return a subprocess.Popen object
- representing the running daemon.
- """
- basename = os.path.splitext(os.path.basename(prog))[0]
- cmd = [prog, "--foreground", "--log-level", "debug",
- "--log-file", self.path(basename + ".log"),
- "--config", self.path("rpki.conf")]
- if args.profile and basename != "rootd":
- cmd.extend((
- "--profile", self.path(basename + ".prof")))
- p = subprocess.Popen(cmd, cwd = self.path())
- print 'Running %s for %s: pid %d process %r' % (" ".join(cmd), self.name, p.pid, p)
- return p
-
- def run_rpkid(self):
- """
- Run rpkid.
- """
- return self.run_python_daemon(prog_rpkid)
-
- def run_irdbd(self):
- """
- Run irdbd.
- """
- return self.run_python_daemon(prog_irdbd)
+ return ".".join(i.name for i in reversed(path))
+
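The resulting handle is the dotted chain of names from the pubd-running host down to this entity, which is what pubd's configure_publication_client is expected to call it; with --flat_publication it collapses to just the entity's own name. For a hypothetical chain RIR -> LIR1 -> Cust1 where only RIR runs pubd:

# Hypothetical names collected from self upward to the pubd runner, as above:
path = ["Cust1", "LIR1", "RIR"]
print ".".join(reversed(path))   # -> RIR.LIR1.Cust1 (just "Cust1" when flat)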
+ @property
+ def host(self):
+ return self.hosted_by or self
+
+ def dump_conf(self):
+ """
+ Write configuration file for OpenSSL and RPKI tools.
+ """
+
+ r = dict(
+ handle = self.name,
+ run_rpkid = str(not self.is_hosted),
+ run_pubd = str(self.runs_pubd),
+ run_rootd = str(self.is_root),
+ rpkid_server_host = "localhost",
+ rpkid_server_port = str(self.rpkid_port),
+ irdbd_server_host = "localhost",
+ irdbd_server_port = str(self.irdbd_port),
+ rootd_server_port = str(self.rootd_port),
+ pubd_server_host = "localhost",
+ pubd_server_port = str(self.pubd.pubd_port),
+ publication_rsync_server = "localhost:%s" % self.pubd.rsync_port,
+ publication_rrdp_notification_uri = "http://localhost:%s/rrdp/notify.xml" % self.pubd.rrdp_port,
+ bpki_servers_directory = self.path(),
+ publication_base_directory = self.path("publication"),
+ rrdp_publication_base_directory = self.path("rrdp-publication"),
+ shared_sql_engine = args.sql_engine,
+ shared_sql_password = "fnord",
+ irdbd_sql_username = "irdb",
+ rpkid_sql_username = "rpki",
+ pubd_sql_username = "pubd")
+
+ if args.sql_engine == "sqlite3":
+ r.update(
+ irdbd_sql_database = self.path("irdb.sqlite3"),
+ rpkid_sql_database = self.path("rpkidb.sqlite3"),
+ pubd_sql_database = self.path("pubdb.sqlite3"))
+ else:
+ r.update(
+ irdbd_sql_database = "irdb%d" % self.engine,
+ rpkid_sql_database = "rpki%d" % self.engine,
+ pubd_sql_database = "pubd%d" % self.engine)
+
+ r.update(config_overrides)
+
+ with open(self.path("rpki.conf"), "w") as f:
+ f.write("# Automatically generated, do not edit\n")
+ print "Writing", f.name
+
+ section = None
+ for line in open(cleanpath(ca_dir, "examples/rpki.conf")):
+ m = section_regexp.match(line)
+ if m:
+ section = m.group(1)
+ m = variable_regexp.match(line)
+ option = m.group(1) if m and section == "myrpki" else None
+ if option and option in r:
+ line = "%s = %s\n" % (option, r[option])
+ f.write(line)
+
+ def dump_rsyncd(self):
+ """
+ Write rsyncd configuration file.
+ """
+
+ if self.runs_pubd:
+ with open(self.path("rsyncd.conf"), "w") as f:
+ print "Writing", f.name
+ f.writelines(s + "\n" for s in
+ ("# Automatically generated, do not edit",
+ "port = %d" % self.rsync_port,
+ "address = localhost",
+ "[rpki]",
+ "log file = rsyncd.log",
+ "read only = yes",
+ "use chroot = no",
+ "path = %s" % self.path("publication"),
+ "comment = RPKI test",
+ "[root]",
+ "log file = rsyncd_root.log",
+ "read only = yes",
+ "use chroot = no",
+ "path = %s" % self.path("publication.root"),
+ "comment = RPKI test root"))
+
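The generated rsyncd.conf exposes two modules on the entity's own rsync port: [rpki] for the normal publication tree and [root] for the directory holding the root certificate. Assuming an entity whose rsync_port came out as 4403, a quick manual check could look like:

import subprocess
# Hypothetical smoke test: list the [rpki] module served by this entity's rsyncd.
subprocess.call(["rsync", "rsync://localhost:4403/rpki/"])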
+ @classmethod
+ def next_rpkic_counter(cls):
+ cls.rpkic_counter += 10000
+ return str(cls.rpkic_counter)
+
+ def run_rpkic(self, *argv):
+ """
+ Run rpkic for this entity.
+ """
+
+ cmd = [prog_rpkic, "-i", self.name]
+ if args.profile:
+ cmd.append("--profile")
+ cmd.append(self.path("rpkic.%s.prof" % rpki.sundial.now()))
+ cmd.extend(str(a) for a in argv if a is not None)
+ print 'Running "%s"' % " ".join(cmd)
+ env = dict(os.environ,
+ YAMLTEST_RPKIC_COUNTER = self.next_rpkic_counter(),
+ RPKI_CONF = self.path("rpki.conf"))
+ subprocess.check_call(cmd, cwd = self.host.path(), env = env)
+
+ def syncdb(self):
+ """
+ Run whatever Django ORM commands are necessary to set up the
+ database this week.
+ """
+
+ # Fork a sub-process for each syncdb/migrate run, because it's
+ # easier than figuring out how to change Django settings after
+ # initialization.
+
+ def sync_settings(settings, verbosity = 1):
+
+ if verbosity > 0:
+ print "Running Django setup for", self.name
+
+ pid = os.fork()
+
+ if pid == 0:
+ logging.getLogger().setLevel(logging.WARNING)
+
+ os.environ.update(RPKI_CONF = self.path("rpki.conf"),
+ DJANGO_SETTINGS_MODULE = "rpki.django_settings." + settings)
+
+ import django
+ django.setup()
+
+ import django.core.management
+ django.core.management.call_command("migrate", verbosity = verbosity, no_color = True,
+ load_initial_data = False, interactive = False)
+
+ if settings in ("gui", "irdb"):
+ from django.contrib.auth.models import User
+ User.objects.create_superuser("root", "root@example.org", "fnord")
+
+ sys.exit(0)
+
+ elif os.waitpid(pid, 0)[1]:
+ raise RuntimeError("Django setup failed for %s %s" % (self.name, settings))
+
+ for settings in ("rpkid", "pubd", "gui"):
+ sync_settings(settings)
+
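syncdb forks one child per Django settings module because, as its comment notes, that is easier than re-initializing Django settings within a single process: the child sets DJANGO_SETTINGS_MODULE, runs the migrations, and exits, while the parent only inspects the status from waitpid. A minimal sketch of that fork-and-check idiom with a hypothetical task callable:

import os
import sys

def run_in_child(task):
    # Fork, run task() in the child, exit 0 on success, and raise in the
    # parent if the child reported a non-zero exit status.
    pid = os.fork()
    if pid == 0:
        task()
        sys.exit(0)
    elif os.waitpid(pid, 0)[1]:
        raise RuntimeError("child task failed")

run_in_child(lambda: None)   # hypothetical no-op task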
+ def run_python_daemon(self, prog):
+ """
+ Start a Python daemon and return a subprocess.Popen object
+ representing the running daemon.
+ """
+
+ basename = os.path.splitext(os.path.basename(prog))[0]
+ cmd = [prog, "--foreground", "--log-level", "debug",
+ "--log-file", self.path(basename + ".log")]
+ if args.profile and basename != "rootd":
+ cmd.extend((
+ "--profile", self.path(basename + ".prof")))
+ env = dict(os.environ, RPKI_CONF = self.path("rpki.conf"))
+ p = subprocess.Popen(cmd, cwd = self.path(), env = env)
+ print "Running %s for %s: pid %d process %r" % (" ".join(cmd), self.name, p.pid, p)
+ return p
+
+ def run_rpkid(self):
+ """
+ Run rpkid.
+ """
+
+ return self.run_python_daemon(prog_rpkid)
+
+ def run_irdbd(self):
+ """
+ Run irdbd.
+ """
+
+ return self.run_python_daemon(prog_irdbd)
+
+ def run_pubd(self):
+ """
+ Run pubd.
+ """
+
+ return self.run_python_daemon(prog_pubd)
+
+ def run_rootd(self):
+ """
+ Run rootd.
+ """
+
+ return self.run_python_daemon(prog_rootd)
+
+ def run_rsyncd(self):
+ """
+ Run rsyncd.
+ """
+
+ p = subprocess.Popen(("rsync", "--daemon", "--no-detach", "--config", "rsyncd.conf"),
+ cwd = self.path())
+ print "Running rsyncd for %s: pid %d process %r" % (self.name, p.pid, p)
+ return p
+
+ def run_gui(self):
+ """
+ Start an instance of the RPKI GUI under the Django test server and
+ return a subprocess.Popen object representing the running daemon.
+ """
- def run_pubd(self):
- """
- Run pubd.
- """
- return self.run_python_daemon(prog_pubd)
+ port = 8000 + self.engine
+ cmd = (prog_rpki_manage, "runserver", str(port))
+ env = dict(os.environ,
+ RPKI_CONF = self.path("rpki.conf"),
+ DJANGO_SETTINGS_MODULE = "rpki.django_settings.gui",
+ RPKI_DJANGO_DEBUG = "yes",
+ ALLOW_PLAIN_HTTP_FOR_TESTING = "I solemnly swear that I am not running this in production")
+ p = subprocess.Popen(cmd, cwd = self.path(), env = env,
+ stdout = open(self.path("gui.log"), "w"), stderr = subprocess.STDOUT)
+ print "Running %s for %s: pid %d process %r" % (" ".join(cmd), self.name, p.pid, p)
+ return p
- def run_rootd(self):
- """
- Run rootd.
- """
- return self.run_python_daemon(prog_rootd)
-
- def run_rsyncd(self):
- """
- Run rsyncd.
- """
- p = subprocess.Popen(("rsync", "--daemon", "--no-detach", "--config", "rsyncd.conf"),
- cwd = self.path())
- print "Running rsyncd for %s: pid %d process %r" % (self.name, p.pid, p)
- return p
def create_root_certificate(db_root):
- print "Creating rootd RPKI root certificate"
+ print "Creating rootd RPKI root certificate"
+
+ root_resources = rpki.resource_set.resource_bag(
+ asn = rpki.resource_set.resource_set_as("0-4294967295"),
+ v4 = rpki.resource_set.resource_set_ipv4("0.0.0.0/0"),
+ v6 = rpki.resource_set.resource_set_ipv6("::/0"))
- root_resources = rpki.resource_set.resource_bag(
- asn = rpki.resource_set.resource_set_as("0-4294967295"),
- v4 = rpki.resource_set.resource_set_ipv4("0.0.0.0/0"),
- v6 = rpki.resource_set.resource_set_ipv6("::/0"))
+ root_key = rpki.x509.RSA.generate(quiet = True)
- root_key = rpki.x509.RSA.generate(quiet = True)
+ root_uri = "rsync://localhost:%d/rpki/%s-root/root" % (db_root.pubd.rsync_port, db_root.name)
- root_uri = "rsync://localhost:%d/rpki/" % db_root.pubd.rsync_port
+ rrdp_uri = "http://localhost:%s/rrdp/notify.xml" % db_root.pubd.rrdp_port
- root_sia = (root_uri, root_uri + "root.mft", None)
+ root_sia = (root_uri + "/", root_uri + "/root.mft", None, rrdp_uri)
- root_cert = rpki.x509.X509.self_certify(
- keypair = root_key,
- subject_key = root_key.get_public(),
- serial = 1,
- sia = root_sia,
- notAfter = rpki.sundial.now() + rpki.sundial.timedelta(days = 365),
- resources = root_resources)
+ root_cert = rpki.x509.X509.self_certify(
+ keypair = root_key,
+ subject_key = root_key.get_public(),
+ serial = 1,
+ sia = root_sia,
+ notAfter = rpki.sundial.now() + rpki.sundial.timedelta(days = 365),
+ resources = root_resources)
- f = open(db_root.path("publication.root/root.cer"), "wb")
- f.write(root_cert.get_DER())
- f.close()
+ with open(db_root.path("root.cer"), "wb") as f:
+ f.write(root_cert.get_DER())
- f = open(db_root.path("root.key"), "wb")
- f.write(root_key.get_DER())
- f.close()
+ with open(db_root.path("root.key"), "wb") as f:
+ f.write(root_key.get_DER())
- f = open(os.path.join(test_dir, "root.tal"), "w")
- f.write("rsync://localhost:%d/root/root.cer\n\n" % db_root.pubd.rsync_port)
- f.write(root_key.get_public().get_Base64())
- f.close()
+ with open(os.path.join(test_dir, "root.tal"), "w") as f:
+ f.write(root_uri + ".cer\n\n")
+ f.write(root_key.get_public().get_Base64())
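The trust anchor locator written at the end contains the rsync URI of the root certificate, a blank line, then the Base64 encoding of the root key's public part. With a hypothetical root entity named Root publishing through rsync port 4418, the first line of root.tal would be:

# Hypothetical first line of root.tal for a root named "Root" on rsync port 4418:
print "rsync://localhost:%d/rpki/%s-root/root.cer" % (4418, "Root")
# -> rsync://localhost:4418/rpki/Root-root/root.cer
# followed by a blank line and root_key.get_public().get_Base64()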
+logger = logging.getLogger(__name__)
-os.environ["TZ"] = "UTC"
+os.environ.update(DJANGO_SETTINGS_MODULE = "rpki.django_settings.irdb",
+ TZ = "UTC")
time.tzset()
parser = argparse.ArgumentParser(description = __doc__)
@@ -662,223 +758,268 @@ parser.add_argument("--synchronize", action = "store_true",
help = "synchronize IRDB with daemons")
parser.add_argument("--profile", action = "store_true",
help = "enable profiling")
+parser.add_argument("-g", "--run_gui", "--gui", action = "store_true",
+ help = "enable GUI using django-admin runserver")
+parser.add_argument("--no-browser", action = "store_true",
+ help = "don't create web browser tabs for GUI")
+parser.add_argument("--notify-when-startup-complete", type = int,
+ help = "send SIGUSR1 to this process when startup is complete")
parser.add_argument("--store-router-private-keys", action = "store_true",
help = "write generated router private keys to disk")
+parser.add_argument("--sql-engine", choices = ("mysql", "sqlite3", "postgresql"), default = "sqlite3",
+ help = "select SQL engine to use")
parser.add_argument("yaml_file", type = argparse.FileType("r"),
help = "YAML description of test network")
args = parser.parse_args()
try:
- if args.pidfile is not None:
- open(args.pidfile, "w").write("%s\n" % os.getpid())
+ if args.pidfile is not None:
+ with open(args.pidfile, "w") as f:
+ print "Writing pidfile", f.name
+ f.write("%s\n" % os.getpid())
- rpki.log.init("yamltest", argparse.Namespace(log_level = logging.DEBUG,
- log_handler = lambda: logging.StreamHandler(sys.stdout)))
+ rpki.log.init("yamltest", argparse.Namespace(log_level = logging.DEBUG,
+ log_handler = lambda: logging.StreamHandler(sys.stdout)))
- # Allow optional config file for this tool to override default
- # passwords: this is mostly so that I can show a complete working
- # example without publishing my own server's passwords.
+ # Allow optional config file for this tool to override default
+ # passwords: this is mostly so that I can show a complete working
+ # example without publishing my own server's passwords.
- cfg = rpki.config.parser(args.config, "yamltest", allow_missing = True)
+ cfg = rpki.config.parser(set_filename = args.config, section = "yamltest", allow_missing = True)
- only_one_pubd = cfg.getboolean("only_one_pubd", True)
- allocation.base_port = cfg.getint("base_port", 4400)
+ only_one_pubd = cfg.getboolean("only_one_pubd", True)
+ allocation.base_port = cfg.getint("base_port", 4400)
- config_overrides = dict(
- (k, cfg.get(k))
- for k in ("rpkid_sql_password", "irdbd_sql_password", "pubd_sql_password",
- "rpkid_sql_username", "irdbd_sql_username", "pubd_sql_username")
- if cfg.has_option(k))
+ config_overrides = dict(
+ (k, cfg.get(k))
+ for k in ("rpkid_sql_password", "irdbd_sql_password", "pubd_sql_password",
+ "rpkid_sql_username", "irdbd_sql_username", "pubd_sql_username")
+ if cfg.has_option(k))
- # Start clean, maybe
+ # Start clean, maybe
- if not args.skip_config:
- for root, dirs, files in os.walk(test_dir, topdown = False):
- for fn in files:
- os.unlink(os.path.join(root, fn))
- for d in dirs:
- os.rmdir(os.path.join(root, d))
+ if not args.skip_config:
+ for root, dirs, files in os.walk(test_dir, topdown = False):
+ for fn in files:
+ os.unlink(os.path.join(root, fn))
+ for d in dirs:
+ os.rmdir(os.path.join(root, d))
- # Read first YAML doc in file and process as compact description of
- # test layout and resource allocations. Ignore subsequent YAML docs,
- # they're for smoketest.py, not this script.
+ # Read first YAML doc in file and process as compact description of
+ # test layout and resource allocations. Ignore subsequent YAML docs,
+ # they're for smoketest.py, not this script.
- db = allocation_db(yaml.safe_load_all(args.yaml_file).next())
+ db = allocation_db(yaml.safe_load_all(args.yaml_file).next())
- # Show what we loaded
+ # Show what we loaded
- #db.dump()
+ #db.dump()
- if args.skip_config:
+ if args.skip_config:
- print "Skipping pre-daemon configuration, assuming you already did that"
+ print "Skipping pre-daemon configuration, assuming you already did that"
- else:
+ else:
- # Set up each entity in our test, create publication directories,
- # and initialize server BPKI.
+ # Set up each entity in our test, create publication directories,
+ # and initialize server BPKI.
- for d in db:
- if not d.is_hosted:
- os.makedirs(d.path())
- d.dump_conf()
- if d.runs_pubd:
- os.makedirs(d.path("publication"))
- d.dump_rsyncd()
- if d.is_root:
- os.makedirs(d.path("publication.root"))
- d.run_rpkic("initialize_server_bpki")
+ for d in db:
+ if not d.is_hosted:
+ print "Initializing", d.name
+ os.makedirs(d.path())
+ d.dump_conf()
+ if d.runs_pubd:
+ os.makedirs(d.path("publication"))
+ d.dump_rsyncd()
+ if d.is_root:
+ os.makedirs(d.path("publication.root"))
+ d.syncdb()
+ d.run_rpkic("initialize_server_bpki")
+ print
- # Initialize resource holding BPKI and generate self-descriptor
- # for each entity.
+ # Initialize resource holding BPKI and generate self-descriptor
+ # for each entity.
- for d in db:
- d.run_rpkic("create_identity", d.name)
+ for d in db:
+ d.run_rpkic("create_identity", d.name)
- # Create RPKI root certificate.
+ # Create RPKI root certificate.
- create_root_certificate(db.root)
+ create_root_certificate(db.root)
- # Set up rootd.
+ # Set up rootd.
- db.root.run_rpkic("configure_root")
+ db.root.run_rpkic("configure_root")
- # From here on we need to pay attention to initialization order. We
- # used to do all the pre-configure_daemons stuff before running any
- # of the daemons, but that doesn't work right in hosted cases, so we
- # have to interleave configuration with starting daemons, just as
- # one would in the real world for this sort of thing.
+ # From here on we need to pay attention to initialization order. We
+ # used to do all the pre-configure_daemons stuff before running any
+ # of the daemons, but that doesn't work right in hosted cases, so we
+ # have to interleave configuration with starting daemons, just as
+ # one would in the real world for this sort of thing.
- progs = []
+ progs = []
- try:
+ try:
- for d in db:
+ for d in db:
- if not d.is_hosted:
- print
- print "Running daemons for", d.name
- if d.is_root:
- progs.append(d.run_rootd())
- progs.append(d.run_irdbd())
- progs.append(d.run_rpkid())
- if d.runs_pubd:
- progs.append(d.run_pubd())
- progs.append(d.run_rsyncd())
-
- if args.synchronize or not args.skip_config:
-
- print
- print "Giving daemons time to start up"
- time.sleep(20)
- assert all(p.poll() is None for p in progs)
+ if not d.is_hosted:
+ print
+ print "Running daemons for", d.name
+ if d.is_root:
+ progs.append(d.run_rootd())
+ progs.append(d.run_irdbd())
+ progs.append(d.run_rpkid())
+ if d.runs_pubd:
+ progs.append(d.run_pubd())
+ progs.append(d.run_rsyncd())
+ if args.run_gui:
+ progs.append(d.run_gui())
- if args.skip_config:
+ if args.synchronize or not args.skip_config:
- print
- print "Skipping configure_*, you'll have to do that yourself if needed"
+ print
+ print "Giving daemons time to start up"
+ time.sleep(20)
+ assert all(p.poll() is None for p in progs)
- else:
+ if args.skip_config:
+
+ print
+ print "Skipping configure_*, you'll have to do that yourself if needed"
- for d in db:
+ else:
+
+ for d in db:
+
+ print
+ print "Configuring", d.name
+ print
+ if d.is_root:
+ assert not d.is_hosted
+ d.run_rpkic("configure_publication_client",
+ "--flat" if args.flat_publication else None,
+ d.path("%s.%s.repository-request.xml" % (d.name, d.name)))
+ print
+ d.run_rpkic("configure_repository",
+ d.path("%s.repository-response.xml" % d.client_handle))
+ print
+ else:
+ d.parent.run_rpkic("configure_child",
+ "--valid_until", d.resources.valid_until,
+ d.path("%s.identity.xml" % d.name))
+ print
+ d.run_rpkic("configure_parent",
+ d.parent.path("%s.%s.parent-response.xml" % (d.parent.name, d.name)))
+ print
+ d.pubd.run_rpkic("configure_publication_client",
+ "--flat" if args.flat_publication else None,
+ d.path("%s.%s.repository-request.xml" % (d.name, d.parent.name)))
+ print
+ d.run_rpkic("configure_repository",
+ d.pubd.path("%s.repository-response.xml" % d.client_handle))
+ print
+
+ print
+ print "Done with initial configuration"
+ print
+
+ if args.synchronize:
+ print
+ print "Synchronizing"
+ print
+ for d in db:
+ if not d.is_hosted:
+ d.run_rpkic("synchronize")
+
+ if args.synchronize or not args.skip_config:
+ print
+ print "Loading CSV files"
+ print
+ for d in db:
+ d.dump_asns()
+ d.dump_prefixes()
+ d.dump_roas()
+ d.dump_ghostbusters()
+ d.dump_router_certificates()
+
+ if args.run_gui:
+ print
+ print 'GUI user "root", password "fnord"'
+ gui_count = 0
+ for d in db:
+ if not d.is_hosted:
+ url = "http://127.0.0.1:%d/rpki/" % (8000 + d.engine)
+ print "GUI URL", url, "for", d.name
+ if not args.no_browser:
+ gui_count += 1
+ if d is db.root:
+ webbrowser.open_new(url)
+ else:
+ webbrowser.open_new_tab(url)
+ time.sleep(2)
+ if gui_count > 1:
+ print "Warning: Logging into more than one GUI instance at once will probably fail due to CSRF protection"
+
+ # Wait until something terminates.
+
+ if not args.stop_after_config or args.keep_going:
+ if args.notify_when_startup_complete:
+ print
+ print "Sending SIGUSR1 to process", args.notify_when_startup_complete
+ os.kill(args.notify_when_startup_complete, signal.SIGUSR1)
+ print
+ print "Waiting for daemons to exit"
+ signal.signal(signal.SIGCHLD, lambda *dont_care: None)
+ while (any(p.poll() is None for p in progs)
+ if args.keep_going else
+ all(p.poll() is None for p in progs)):
+ signal.pause()
+
+ finally:
print
- print "Configuring", d.name
+ print "Shutting down"
print
- if d.is_root:
- assert not d.is_hosted
- d.run_rpkic("configure_publication_client",
- "--flat" if args.flat_publication else None,
- d.path("%s.%s.repository-request.xml" % (d.name, d.name)))
- print
- d.run_rpkic("configure_repository",
- d.path("%s.repository-response.xml" % d.client_handle))
- print
+
+ signal.signal(signal.SIGCHLD, signal.SIG_DFL)
+
+ if args.profile:
+ how_long = 300
else:
- d.parent.run_rpkic("configure_child",
- "--valid_until", d.resources.valid_until,
- d.path("%s.identity.xml" % d.name))
- print
- d.run_rpkic("configure_parent",
- d.parent.path("%s.%s.parent-response.xml" % (d.parent.name, d.name)))
- print
- d.pubd.run_rpkic("configure_publication_client",
- "--flat" if args.flat_publication else None,
- d.path("%s.%s.repository-request.xml" % (d.name, d.parent.name)))
- print
- d.run_rpkic("configure_repository",
- d.pubd.path("%s.repository-response.xml" % d.client_handle))
- print
-
- print
- print "Done with initial configuration"
- print
-
- if args.synchronize:
- print
- print "Synchronizing"
- print
- for d in db:
- if not d.is_hosted:
- d.run_rpkic("synchronize")
-
- if args.synchronize or not args.skip_config:
- print
- print "Loading CSV files"
- print
- for d in db:
- d.dump_asns()
- d.dump_prefixes()
- d.dump_roas()
- d.dump_ghostbusters()
- d.dump_router_certificates()
-
- # Wait until something terminates.
-
- if not args.stop_after_config or args.keep_going:
- print
- print "Waiting for daemons to exit"
- signal.signal(signal.SIGCHLD, lambda *dont_care: None)
- while (any(p.poll() is None for p in progs)
- if args.keep_going else
- all(p.poll() is None for p in progs)):
- signal.pause()
-
- finally:
-
- print
- print "Shutting down"
- print
-
- signal.signal(signal.SIGCHLD, signal.SIG_DFL)
-
- if args.profile:
- how_long = 300
- else:
- how_long = 30
+ how_long = 30
+
+ how_often = how_long / 2
- how_often = how_long / 2
+ for i in xrange(how_long):
+ if i % how_often == 0:
+ for p in progs:
+ if p.poll() is None:
+ print "Politely nudging pid %d" % p.pid
+ p.terminate()
+ print
+ if all(p.poll() is not None for p in progs):
+ break
+ time.sleep(1)
- for i in xrange(how_long):
- if i % how_often == 0:
for p in progs:
- if p.poll() is None:
- print "Politely nudging pid %d" % p.pid
- p.terminate()
- print
- if all(p.poll() is not None for p in progs):
- break
- time.sleep(1)
+ if p.poll() is None:
+ print "Pulling the plug on pid %d" % p.pid
+ p.kill()
- for p in progs:
- if p.poll() is None:
- print "Pulling the plug on pid %d" % p.pid
- p.kill()
+ for p in progs:
+ print "Program pid %d %r returned %d" % (p.pid, p, p.wait())
- for p in progs:
- print "Program pid %d %r returned %d" % (p.pid, p, p.wait())
+except Exception, e:
+ print "Blowing out on exception", str(e)
+ raise
finally:
- if args.pidfile is not None:
- os.unlink(args.pidfile)
+ if args.pidfile is not None and os.path.exists(args.pidfile):
+ os.unlink(args.pidfile)
+
+# Local Variables:
+# indent-tabs-mode: nil
+# End: